Skip to content

Commit da7eb33

Browse files
committed
Change solution for tracking logs (#308)
* Change tracking logs method.
* Change version to generate dev package.
* Change path name in S3
1 parent fd2bae9 commit da7eb33

File tree

3 files changed

+28
-22
lines changed

3 files changed

+28
-22
lines changed

butterfree/_cli/migrate.py

Lines changed: 25 additions & 20 deletions
Original file line number | Diff line number | Diff line change
@@ -1,17 +1,18 @@
1+
import datetime
12
import importlib
23
import inspect
34
import os
45
import pkgutil
56
import sys
67
from typing import Set
78

9+
import boto3
810
import setuptools
911
import typer
12+
from botocore.exceptions import ClientError
1013

11-
from butterfree.clients import SparkClient
1214
from butterfree.configs import environment
1315
from butterfree.configs.logger import __logger
14-
from butterfree.extract.readers import FileReader
1516
from butterfree.migrations.database_migration import ALLOWED_DATABASE
1617
from butterfree.pipelines import FeatureSetPipeline
1718

@@ -106,30 +107,34 @@ class Migrate:
106107
pipelines: list of Feature Set Pipelines to use to migration.
107108
"""
108109

109-
def __init__(
110-
self, pipelines: Set[FeatureSetPipeline], spark_client: SparkClient = None
111-
) -> None:
110+
def __init__(self, pipelines: Set[FeatureSetPipeline],) -> None:
112111
self.pipelines = pipelines
113-
self.spark_client = spark_client or SparkClient()
114112

115113
def _send_logs_to_s3(self, file_local: bool) -> None:
116114
"""Send all migration logs to S3."""
117-
log_path = "../logging.json"
118-
119-
file_reader = FileReader(id="name", path=log_path, format="json")
120-
df = file_reader.consume(self.spark_client)
121-
122-
path = environment.get_variable("FEATURE_STORE_S3_BUCKET")
123-
124-
self.spark_client.write_dataframe(
125-
dataframe=df,
126-
format_="json",
127-
mode="append",
128-
**{"path": f"s3a://{path}/logging"},
115+
s3_client = boto3.client("s3")
116+
117+
file_name = "../logging.json"
118+
timestamp = datetime.datetime.now()
119+
object_name = (
120+
f"logs/migrate/"
121+
f"{timestamp.strftime('%Y-%m-%d')}"
122+
f"/logging-{timestamp.strftime('%H:%M:%S')}.json"
129123
)
124+
bucket = environment.get_variable("FEATURE_STORE_S3_BUCKET")
125+
126+
try:
127+
s3_client.upload_file(
128+
file_name,
129+
bucket,
130+
object_name,
131+
ExtraArgs={"ACL": "bucket-owner-full-control"},
132+
)
133+
except ClientError:
134+
raise
130135

131-
if not file_local and os.path.exists(log_path):
132-
os.remove(log_path)
136+
if not file_local and os.path.exists(file_name):
137+
os.remove(file_name)
133138

134139
def run(self, generate_logs: bool = False) -> None:
135140
"""Construct and apply the migrations."""

requirements.txt

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -5,4 +5,5 @@ parameters-validation>=1.1.5,<2.0
55
pyspark==3.*
66
typer>=0.3,<0.4
77
setuptools>=41,<42
8-
typing-extensions==3.7.4.3
8+
typing-extensions==3.7.4.3
9+
boto3==1.17.*

setup.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
from setuptools import find_packages, setup
22

33
__package_name__ = "butterfree"
4-
__version__ = "1.2.0.dev7"
4+
__version__ = "1.2.0.dev8"
55
__repository_url__ = "https://github.com/quintoandar/butterfree"
66

77
with open("requirements.txt") as f:

0 commit comments

Comments (0)