fixes multiprocessing.
collect.py
@@ -188,21 +188,20 @@ print(timeStartScrape.strftime(fTimeFormat))
 print("---")
 
-# Iterate over each Twitter account using multiprocessing
 # Iterate over each Twitter account using multiprocessing
+with concurrent.futures.ProcessPoolExecutor() as executor:
+    # List to store the scraping tasks
+    tasks = []
+    for handle in accounts:
+        # Iterate over each time slice
+        for slice_data in time_slices:
+            # ... Code to prepare the slice_data ...
+            # Schedule the scraping task
+            task = executor.submit(
+                scrapeTweets, handle, slice_data, keywords, td, tweetDFColumns
+            )
+            tasks.append(task)
+    # Wait for all tasks to complete
+    concurrent.futures.wait(tasks)
-# with concurrent.futures.ProcessPoolExecutor() as executor:
-#     # List to store the scraping tasks
-#     tasks = []
-#     for handle in accounts:
-#         # Iterate over each time slice
-#         for slice_data in time_slices:
-#             # ... Code to prepare the slice_data ...
-#             # Schedule the scraping task
-#             task = executor.submit(
-#                 scrapeTweets, handle, slice_data, keywords, td, tweetDFColumns
-#             )
-#             # Store the handle and slice_data as attributes of the task
-#             # Wait for all tasks to complete
-#             concurrent.futures.wait(tasks)
 
 timeEndScrape = datetime.now()
 print("---")
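
Note: the added block is the standard submit/wait idiom for concurrent.futures. Below is a minimal, self-contained sketch of the same pattern; the names (scrapeTweets, accounts, time_slices, keywords, td, tweetDFColumns) come from the diff, while the stub body and sample data are assumptions. The __main__ guard is worth highlighting: ProcessPoolExecutor re-imports the module in each worker process, so unguarded top-level scheduling code is a common source of multiprocessing bugs.

import concurrent.futures

def scrapeTweets(handle, slice_data, keywords, td, tweetDFColumns):
    # Stand-in for the real scraper: one call per (account, time slice) pair.
    print(f"scraping {handle}{slice_data['suffix']}")

accounts = ["exampleHandle"]                                   # assumed sample data
time_slices = [{"suffix": "-slice1"}, {"suffix": "-slice2"}]   # assumed sample data
keywords, td, tweetDFColumns = [], None, []                    # assumed placeholders

if __name__ == "__main__":
    with concurrent.futures.ProcessPoolExecutor() as executor:
        # Schedule one task per account and time slice, then block until all finish.
        tasks = []
        for handle in accounts:
            for slice_data in time_slices:
                tasks.append(
                    executor.submit(scrapeTweets, handle, slice_data,
                                    keywords, td, tweetDFColumns)
                )
        concurrent.futures.wait(tasks)
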
@@ -219,10 +218,14 @@ for handle in accounts:
     for tslice in time_slices:
         suffix = tslice['suffix']
         AllFilesList.append(f"Tweets-{handle}{suffix}.csv")
-with open(f"{logfile}missing-"+timeStartScrape.strftime(fTimeFormat)+".txt", "w") as fout:
+with open(f"{logfile}"+timeStartScrape.strftime(fTimeFormat)+"_missing.txt", "w") as fout:
     for file in AllFilesList:
         if file not in tweetfiles:
             fout.write(f'Missing: {file}.\n')  # if the file is not in tweetfiles, log it as missing
+        else:
+            fout.write('all slices scraped.')
+
+
 # check if file_alltweets (previously scraped tweets that have been merged into one file) exists; if it does, remove it from the list so it is not included in the following merge
 if file_alltweets in tweetfiles:
     tweetfiles.remove(file_alltweets)
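
Note: the second hunk's bookkeeping can be exercised in isolation. A sketch under assumptions follows: fTimeFormat, logfile, tweetfiles, and file_alltweets stand in for values defined elsewhere in collect.py. The check is restructured so 'all slices scraped.' is written once rather than once per found file; in the diff as written, the else branch fires for every file that is present.

from datetime import datetime

fTimeFormat = "%Y-%m-%d_%H-%M-%S"    # assumed; defined elsewhere in collect.py
logfile = ""                         # assumed prefix; empty writes to the working directory
timeStartScrape = datetime.now()
accounts = ["exampleHandle"]         # assumed sample data
time_slices = [{"suffix": "-slice1"}, {"suffix": "-slice2"}]
tweetfiles = ["Tweets-exampleHandle-slice1.csv"]   # e.g. gathered via os.listdir()

# Build the list of files every (account, slice) pair should have produced.
AllFilesList = [
    f"Tweets-{handle}{tslice['suffix']}.csv"
    for handle in accounts
    for tslice in time_slices
]

with open(f"{logfile}" + timeStartScrape.strftime(fTimeFormat) + "_missing.txt", "w") as fout:
    missing = [file for file in AllFilesList if file not in tweetfiles]
    if missing:
        for file in missing:
            fout.write(f"Missing: {file}.\n")
    else:
        fout.write("all slices scraped.")

# Exclude the previously merged output from a subsequent merge, as in the hunk
# above; the file name here is hypothetical.
file_alltweets = "all-tweets.csv"
if file_alltweets in tweetfiles:
    tweetfiles.remove(file_alltweets)
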