import os
from typing import List, Type, Union

import boto3
import pandas as pd

from tools.config import (
    AWS_REGION,
    DOCUMENT_REDACTION_BUCKET,
    RUN_AWS_FUNCTIONS,
    S3_OUTPUTS_BUCKET,
    SAVE_LOGS_TO_CSV,
)
from tools.secure_path_utils import secure_join

PandasDataFrame = Type[pd.DataFrame]


def get_assumed_role_info():
    """Return the caller identity ARN and the final path segment of that ARN."""
    sts_endpoint = f"https://sts.{AWS_REGION}.amazonaws.com"
    sts = boto3.client("sts", region_name=AWS_REGION, endpoint_url=sts_endpoint)
    response = sts.get_caller_identity()

    # Extract ARN of the assumed role
    assumed_role_arn = response["Arn"]

    # Extract the final path segment of the ARN (for an assumed-role ARN of
    # the form "arn:aws:sts::<account>:assumed-role/<role>/<session>", this
    # is the role session name)
    assumed_role_name = assumed_role_arn.split("/")[-1]

    return assumed_role_arn, assumed_role_name
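
# A minimal usage sketch (assumes valid AWS credentials are available; the
# printed values are illustrative only, not from this app's environment):
#
#     arn, name = get_assumed_role_info()
#     print(arn)   # e.g. arn:aws:sts::123456789012:assumed-role/MyRole/my-session
#     print(name)  # e.g. my-session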


if RUN_AWS_FUNCTIONS:
    try:
        session = boto3.Session(region_name=AWS_REGION)

    except Exception as e:
        print("Could not start boto3 session:", e)

    try:
        assumed_role_arn, assumed_role_name = get_assumed_role_info()

        print("Successfully assumed ARN role")
        # print("Assumed Role ARN:", assumed_role_arn)
        # print("Assumed Role Name:", assumed_role_name)

    except Exception as e:
        print("Could not get assumed role from STS:", e)


# Download a single file directly from S3 - requires AWS credentials
def download_file_from_s3(
    bucket_name: str,
    key: str,
    local_file_path_and_name: str,
    RUN_AWS_FUNCTIONS: bool = RUN_AWS_FUNCTIONS,
):
    """Download a single object from S3 to a local file path."""

    if RUN_AWS_FUNCTIONS:

        try:
            # Ensure the local directory exists (dirname is empty when the
            # target file is in the current working directory)
            local_directory = os.path.dirname(local_file_path_and_name)
            if local_directory:
                os.makedirs(local_directory, exist_ok=True)

            s3 = boto3.client("s3", region_name=AWS_REGION)
            s3.download_file(bucket_name, key, local_file_path_and_name)
            print(
                f"File downloaded from s3://{bucket_name}/{key} to {local_file_path_and_name}"
            )
        except Exception as e:
            print("Could not download file:", key, "from S3 due to:", e)


def download_folder_from_s3(
    bucket_name: str,
    s3_folder: str,
    local_folder: str,
    RUN_AWS_FUNCTIONS: bool = RUN_AWS_FUNCTIONS,
):
    """
    Download all files from an S3 folder to a local folder.
    """
    if RUN_AWS_FUNCTIONS:
        if bucket_name and s3_folder and local_folder:

            s3 = boto3.client("s3", region_name=AWS_REGION)

            # List objects in the specified S3 folder, paginating so that
            # folders with more than 1,000 objects (the list_objects_v2 page
            # size) are fully covered
            paginator = s3.get_paginator("list_objects_v2")

            # Download each object
            for page in paginator.paginate(Bucket=bucket_name, Prefix=s3_folder):
                for obj in page.get("Contents", []):
                    # Extract object key and construct local file path
                    object_key = obj["Key"]
                    local_file_path = secure_join(
                        local_folder, os.path.relpath(object_key, s3_folder)
                    )

                    # Create directories if necessary
                    os.makedirs(os.path.dirname(local_file_path), exist_ok=True)

                    # Download the object
                    try:
                        s3.download_file(bucket_name, object_key, local_file_path)
                        print(
                            f"Downloaded 's3://{bucket_name}/{object_key}' to '{local_file_path}'"
                        )
                    except Exception as e:
                        print(
                            f"Error downloading 's3://{bucket_name}/{object_key}':", e
                        )
        else:
            print(
                "One or more required variables are empty, could not download from S3"
            )
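
# A minimal usage sketch (placeholder names; downloads every object under the
# given prefix, preserving relative paths):
#
#     download_folder_from_s3(
#         bucket_name="example-bucket",
#         s3_folder="inputs/session-123/",
#         local_folder="downloads/session-123",
#     )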


def download_files_from_s3(
    bucket_name: str,
    s3_folder: str,
    local_folder: str,
    filenames: Union[List[str], str],
    RUN_AWS_FUNCTIONS: bool = RUN_AWS_FUNCTIONS,
):
    """
    Download specific files from an S3 folder to a local folder.
    """

    if RUN_AWS_FUNCTIONS:
        if bucket_name and s3_folder and local_folder and filenames:

            s3 = boto3.client("s3", region_name=AWS_REGION)

            print("Trying to download file: ", filenames)

            if filenames == "*":
                # List all objects in the S3 folder, paginating to cover more
                # than 1,000 objects
                print("Trying to download all files in AWS folder:", s3_folder)
                paginator = s3.get_paginator("list_objects_v2")

                filenames = [
                    obj["Key"].split("/")[-1]
                    for page in paginator.paginate(
                        Bucket=bucket_name, Prefix=s3_folder
                    )
                    for obj in page.get("Contents", [])
                ]

                print("Found filenames in AWS folder:", filenames)

            for filename in filenames:
                object_key = secure_join(s3_folder, filename)
                local_file_path = secure_join(local_folder, filename)

                # Create directories if necessary
                os.makedirs(os.path.dirname(local_file_path), exist_ok=True)

                # Download the object
                try:
                    s3.download_file(bucket_name, object_key, local_file_path)
                    print(
                        f"Downloaded 's3://{bucket_name}/{object_key}' to '{local_file_path}'"
                    )
                except Exception as e:
                    print(f"Error downloading 's3://{bucket_name}/{object_key}':", e)

        else:
            print(
                "One or more required variables are empty, could not download from S3"
            )
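
# A minimal usage sketch (placeholder names). Passing "*" instead of a list
# downloads every object under the prefix:
#
#     download_files_from_s3(
#         bucket_name="example-bucket",
#         s3_folder="inputs/session-123/",
#         local_folder="downloads/session-123",
#         filenames=["report.pdf", "summary.csv"],
#     )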


def upload_file_to_s3(
    local_file_paths: List[str],
    s3_key: str,
    s3_bucket: str = DOCUMENT_REDACTION_BUCKET,
    RUN_AWS_FUNCTIONS: bool = RUN_AWS_FUNCTIONS,
):
    """
    Uploads a file from local machine to Amazon S3.

    Args:
    - local_file_path: Local file path(s) of the file(s) to upload.
    - s3_key: Key (path) to the file in the S3 bucket.
    - s3_bucket: Name of the S3 bucket.

    Returns:
    - Message as variable/printed to console
    """
    final_out_message = list()
    final_out_message_str = ""

    if RUN_AWS_FUNCTIONS:
        try:
            if s3_bucket and s3_key and local_file_paths:

                s3_client = boto3.client("s3", region_name=AWS_REGION)

                if isinstance(local_file_paths, str):
                    local_file_paths = [local_file_paths]

                for file in local_file_paths:
                    if s3_client:
                        # print(s3_client)
                        try:
                            # Get the file name from the file path
                            file_name = os.path.basename(file)

                            s3_key_full = s3_key + file_name
                            # print("S3 key: ", s3_bucket, "/", s3_key_full, sep="")

                            s3_client.upload_file(file, s3_bucket, s3_key_full)
                            out_message = (
                                "File " + file_name + " uploaded successfully!"
                            )

                        except Exception as e:
                            out_message = f"Error uploading file(s): {e}"
                            print(out_message)

                        final_out_message.append(out_message)
                        final_out_message_str = "\n".join(final_out_message)

                    else:
                        final_out_message_str = "Could not connect to AWS."
            else:
                final_out_message_str = (
                    "At least one essential variable is empty, could not upload to S3"
                )
        except Exception as e:
            final_out_message_str = "Could not upload files to S3 due to: " + str(e)
            print(final_out_message_str)
    else:
        final_out_message_str = "App not set to run AWS functions"

    return final_out_message_str
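
# A minimal usage sketch (placeholder names). The file name is appended to
# s3_key, so the prefix should normally end with "/":
#
#     message = upload_file_to_s3(
#         local_file_paths=["outputs/redacted.pdf"],
#         s3_key="outputs/session-123/",
#         s3_bucket="example-bucket",
#     )
#     print(message)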


def upload_log_file_to_s3(
    local_file_paths: List[str],
    s3_key: str,
    s3_bucket: str = DOCUMENT_REDACTION_BUCKET,
    RUN_AWS_FUNCTIONS: bool = RUN_AWS_FUNCTIONS,
    SAVE_LOGS_TO_CSV: bool = SAVE_LOGS_TO_CSV,
):
    """
    Uploads a log file from local machine to Amazon S3.

    Args:
    - local_file_path: Local file path(s) of the file(s) to upload.
    - s3_key: Key (path) to the file in the S3 bucket.
    - s3_bucket: Name of the S3 bucket.

    Returns:
    - Message as variable/printed to console
    """
    final_out_message = list()
    final_out_message_str = ""

    if RUN_AWS_FUNCTIONS and SAVE_LOGS_TO_CSV:
        try:
            if s3_bucket and s3_key and local_file_paths:

                s3_client = boto3.client("s3", region_name=AWS_REGION)

                if isinstance(local_file_paths, str):
                    local_file_paths = [local_file_paths]

                for file in local_file_paths:
                    if s3_client:
                        # print(s3_client)
                        try:
                            # Get the file name from the file path
                            file_name = os.path.basename(file)

                            s3_key_full = s3_key + file_name

                            s3_client.upload_file(file, s3_bucket, s3_key_full)
                            out_message = (
                                "File " + file_name + " uploaded successfully!"
                            )
                            # print(out_message)

                        except Exception as e:
                            out_message = f"Error uploading file(s): {e}"
                            print(out_message)

                        final_out_message.append(out_message)
                        final_out_message_str = "\n".join(final_out_message)

                    else:
                        final_out_message_str = "Could not connect to AWS."
            else:
                final_out_message_str = (
                    "At least one essential variable is empty, could not upload to S3"
                )
        except Exception as e:
            final_out_message_str = "Could not upload files to S3 due to: " + str(e)
            print(final_out_message_str)
    else:
        final_out_message_str = (
            "App not set to run AWS functions and/or save logs to CSV"
        )

    return final_out_message_str
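
# Usage mirrors upload_file_to_s3 above; uploads are skipped unless both
# RUN_AWS_FUNCTIONS and SAVE_LOGS_TO_CSV are enabled.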


# Helper to upload outputs to S3 when enabled in config.
def export_outputs_to_s3(
    file_list_state,
    s3_output_folder_state_value: str,
    save_outputs_to_s3_flag: bool,
    base_file_state=None,
    s3_bucket: str = S3_OUTPUTS_BUCKET,
):
    """
    Upload a list of local output files to the configured S3 outputs folder.

    - file_list_state: Gradio dropdown state that holds a list of file paths or a
        single path/string. If blank/empty, no action is taken.
    - s3_output_folder_state_value: Final S3 key prefix (including any session hash)
        to use as the destination folder for uploads.
    - save_outputs_to_s3_flag: Runtime toggle; when False, nothing is uploaded.
    - base_file_state: Optional original input file (path, Gradio File object, or
        list of either) whose name stem is used as an extra subfolder layer.
    - s3_bucket: Name of the S3 bucket.
    """
    try:

        # Respect the runtime toggle as well as environment configuration
        if not save_outputs_to_s3_flag:
            return

        if not s3_output_folder_state_value:
            # No configured S3 outputs folder – nothing to do
            return

        # Normalise input to a Python list of strings
        file_paths = file_list_state
        if not file_paths:
            return

        # Gradio dropdown may return a single string or a list
        if isinstance(file_paths, str):
            file_paths = [file_paths]

        # Filter out any non-truthy values
        file_paths = [p for p in file_paths if p]
        if not file_paths:
            return

        # Derive a base file stem (name without extension) from the original
        # file(s) being analysed, if provided. This is used to create an
        # additional subfolder layer so that outputs are grouped under the
        # analysed file name rather than under each output file name.
        base_stem = None
        if base_file_state:
            base_path = None

            # Gradio File components typically provide a list of objects with a `.name` attribute
            if isinstance(base_file_state, str):
                base_path = base_file_state
            elif isinstance(base_file_state, list) and base_file_state:
                first_item = base_file_state[0]
                base_path = getattr(first_item, "name", None) or str(first_item)
            else:
                base_path = getattr(base_file_state, "name", None) or str(
                    base_file_state
                )

            if base_path:
                base_name = os.path.basename(base_path)
                base_stem, _ = os.path.splitext(base_name)

        # Ensure base S3 prefix (session/date) ends with a trailing slash
        base_prefix = s3_output_folder_state_value
        if not base_prefix.endswith("/"):
            base_prefix = base_prefix + "/"

        # For each file, append a subfolder. If we have a derived base_stem
        # from the input being analysed, use that; otherwise, fall back to
        # the individual output file name stem. Final pattern:
        #   <session_output_folder>/<date>/<base_file_stem>/<file_name>
        # or, if base_file_stem is not available:
        #   <session_output_folder>/<date>/<output_file_stem>/<file_name>
        for file in file_paths:
            file_name = os.path.basename(file)

            if base_stem:
                folder_stem = base_stem
            else:
                folder_stem, _ = os.path.splitext(file_name)

            per_file_prefix = base_prefix + folder_stem + "/"

            out_message = upload_file_to_s3(
                local_file_paths=[file],
                s3_key=per_file_prefix,
                s3_bucket=s3_bucket,
            )

            # Log any issues to console so failures are visible in logs/stdout
            if (
                "Error uploading file" in out_message
                or "could not upload" in out_message.lower()
            ):
                print("export_outputs_to_s3 encountered issues:", out_message)

        print("Successfully uploaded outputs to S3")

    except Exception as e:
        # Do not break the app flow if S3 upload fails – just report to console
        print(f"export_outputs_to_s3 failed with error: {e}")

    # No GUI outputs to update
    return
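
# A minimal usage sketch (placeholder values). Each output lands under
# "<s3_output_folder>/<base_file_stem>/<output_file_name>":
#
#     export_outputs_to_s3(
#         file_list_state=["outputs/report_redacted.pdf"],
#         s3_output_folder_state_value="outputs/session-123/2024-01-01/",
#         save_outputs_to_s3_flag=True,
#         base_file_state="inputs/report.pdf",
#     )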