Skip to content
Snippets Groups Projects
Commit d168902d authored by Thomas Bär
Browse files

Ohne Threads

parent 7b01551d
No related branches found
No related tags found
No related merge requests found
......@@ -2,7 +2,6 @@ import os
import re
from urllib.parse import parse_qs, urlencode
import pandas as pd
import concurrent.futures
import pyarrow as pa
import pyarrow.parquet as pq
from tqdm import tqdm
......@@ -34,7 +33,7 @@ def process_file(filename):
filename (str): Der Pfad zur zu verarbeitenden Log-Datei.
Returns:
pandas.DataFrame: Ein DataFrame mit den einzigartigen, extrahierten URLs.
set: Ein Set mit den einzigartigen, extrahierten URLs.
"""
unique_urls = set()
with open(filename, "r") as file:
......@@ -48,7 +47,7 @@ def process_file(filename):
clean_query_string
): # Nur hinzufügen, wenn es relevante Parameter gibt
unique_urls.add(f"/select?{clean_query_string}")
return pd.DataFrame(list(unique_urls), columns=["url"])
return unique_urls
def extract_urls_from_logs():
......@@ -77,33 +76,27 @@ def extract_urls_from_logs():
writer = pq.ParquetWriter(output_file, schema, compression="snappy")
all_unique_urls = set()
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
futures = {
executor.submit(process_file, file): file for file in files_to_process
}
# Erstelle einen Fortschrittsbalken für die Gesamtanzahl der Dateien
with tqdm(total=len(files_to_process), desc="Gesamtfortschritt") as pbar:
for future in concurrent.futures.as_completed(futures):
file = futures[future]
try:
df = future.result()
new_unique_urls = set(df["url"]) - all_unique_urls
all_unique_urls.update(new_unique_urls)
if new_unique_urls:
table = pa.Table.from_pandas(
pd.DataFrame(list(new_unique_urls), columns=["url"])
)
writer.write_table(table)
pbar.update(1)
pbar.set_postfix(
{
"Datei": os.path.basename(file),
"Neue einzigartige URLs": len(new_unique_urls),
}
# Erstelle einen Fortschrittsbalken für die Gesamtanzahl der Dateien
with tqdm(total=len(files_to_process), desc="Gesamtfortschritt") as pbar:
for file in files_to_process:
try:
new_unique_urls = process_file(file) - all_unique_urls
all_unique_urls.update(new_unique_urls)
if new_unique_urls:
table = pa.Table.from_pandas(
pd.DataFrame(list(new_unique_urls), columns=["url"])
)
except Exception as exc:
print(f"{file} generierte eine Ausnahme: {exc}")
writer.write_table(table)
pbar.update(1)
pbar.set_postfix(
{
"Datei": os.path.basename(file),
"Neue einzigartige URLs": len(new_unique_urls),
}
)
except Exception as exc:
print(f"{file} generierte eine Ausnahme: {exc}")
writer.close()
return len(all_unique_urls)
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment