|
|
|
@@ -1051,6 +1051,15 @@ def new_create_scrape_entries(): |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def new_create_empty_scrape_entry(item): |
|
|
|
|
check_scrape_item = database.session \ |
|
|
|
|
.query(database.ScrapeItems) \ |
|
|
|
|
.filter(database.ScrapeItems.scrape_directory_id == item.directory_id) \ |
|
|
|
|
.first() |
|
|
|
|
|
|
|
|
|
if check_scrape_item is not None: |
|
|
|
|
logger.warning('Scrape Item in database') |
|
|
|
|
return |
|
|
|
|
|
|
|
|
|
new_scrape_item = database.ScrapeItems( |
|
|
|
|
scrape_directory = item.directory_path, |
|
|
|
|
scrape_directory_id = item.directory_id |
|
|
|
@@ -1069,6 +1078,11 @@ def new_get_scrape_candidates(item): |
|
|
|
|
.query(database.ScrapeItems) \ |
|
|
|
|
.filter(database.ScrapeItems.scrape_directory == item.directory_path) \ |
|
|
|
|
.first() |
|
|
|
|
|
|
|
|
|
if scrape_item.scrape_candidate is not None: |
|
|
|
|
logger.warning('Item Already Scraped') |
|
|
|
|
return |
|
|
|
|
|
|
|
|
|
candidates = cv.search(item.directory_path, limit=10, resources=['volume']) |
|
|
|
|
scrape_candidate = candidates.results[0]['id'] |
|
|
|
|
match_found = False |
|
|
|
|