From de7c14ed66b412ecd0049fd87c4ba5afb510eab5 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Tue, 24 Jun 2025 17:09:32 -0700 Subject: [PATCH 01/65] preliminary version of ELinkGetResponseModel made, using ELinkAPI RecordResponse --- legacy/mpcite/models.py | 302 ++++------------------- legacy/mpcite/recordresponse_example.txt | 0 tests/test_elink_api.py | 59 +++++ 3 files changed, 105 insertions(+), 256 deletions(-) create mode 100644 legacy/mpcite/recordresponse_example.txt create mode 100644 tests/test_elink_api.py diff --git a/legacy/mpcite/models.py b/legacy/mpcite/models.py index b2fab65..e4d055f 100644 --- a/legacy/mpcite/models.py +++ b/legacy/mpcite/models.py @@ -1,101 +1,65 @@ -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, ConfigDict from typing import List, Dict, Optional -from datetime import datetime +import datetime from enum import Enum import bibtexparser - - -class ConnectionModel(BaseModel): - endpoint: str = Field(..., title="URL Endpoint of the connection") - username: str = Field(..., title="User Name") - password: str = Field(..., title="Password") - - -class RoboCrysModel(BaseModel): - material_id: str - last_updated: datetime - description: Optional[str] = None - error: Optional[str] = None - - @classmethod - def get_default_description(cls): - return ( - "Computed materials data using density " - "functional theory calculations. These calculations determine " - "the electronic structure of bulk materials by solving " - "approximations to the Schrodinger equation. For more " - "information, see https://materialsproject.org/docs/calculations" - ) - - -class MaterialModel(BaseModel): - last_updated: datetime = Field( - None, title="timestamp for the most recent calculation" - ) - updated_at: datetime = Field(None, title="alternative to last_updated") - created_at: datetime = Field( - None, - description="creation time for this material defined by when the first structure " - "optimization calculation was run", - ) - task_id: str = Field( - "", title="task id for this material. Also called the material id" - ) - # pretty_formula: str = Field(..., title="clean representation of the formula") - pretty_formula: str = Field(..., title="clean representation of the formula") - chemsys: str - +from elinkapi import Elink, Record +from elinkapi.record import RecordResponse, AccessLimitation, JournalType +from elinkapi.geolocation import Geolocation +from elinkapi.identifier import Identifier +from elinkapi.related_identifier import RelatedIdentifier +from elinkapi.person import Person +from elinkapi.organization import Organization + +class TestClass(RecordResponse): + ... + # stuff class ELinkGetResponseModel(BaseModel): - osti_id: Optional[str] = Field(...) + osti_id: Optional[int] = Field(...) dataset_type: str = Field(default="SM") title: str = Field(...) - creators: str = Field(default="Kristin Persson") # replace with authors + persons: List[Person] contributors: List[Dict[str, str]] = Field( default=[{"first_name": "Materials", "last_name": "Project"}], description="List of Dict of first name, last name mapping", ) # no contributor - product_nos: str = Field(..., title="MP id") - accession_num: str = Field(..., title="MP id") - contract_nos: str = Field("AC02-05CH11231; EDCBEE") - originating_research_org: str = Field( - default="Lawrence Berkeley National Laboratory (LBNL), Berkeley, CA (United States)" - ) - publication_date: str = Field(...) 
- language: str = Field(default="English") - country: str = Field(default="US") - sponsor_org: str = Field( - default="USDOE Office of Science (SC), Basic Energy Sciences (BES) (SC-22)" - ) + publication_date: datetime.date site_url: str = Field(...) - contact_name: str = Field(default="Kristin Persson") - contact_org: str = Field(default="LBNL") - contact_email: str = Field(default="feedback@materialsproject.org") - contact_phone: str = Field(default="+1(510)486-7218") - related_resource: str = Field("https://materialsproject.org/citing") - contributor_organizations: str = Field(default="MIT; UC Berkeley; Duke; U Louvain") - subject_categories_code: str = Field(default="36 MATERIALS SCIENCE") - keywords: str = Field(...) - description: str = Field(default="") doi: dict = Field( {}, title="DOI info", description="Mainly used during GET request" ) + mp_id: str | None = None + keywords: List[str] = None @classmethod - def get_title(cls, material: MaterialModel): - formula = material.pretty_formula + def from_elinkapi_record(cls, R): + gotResponse = ELinkGetResponseModel( + osti_id = R.osti_id, + title = R.title, + persons = R.persons, + # assume default contributors for now, creators vs contributors? + publication_date = R.publication_date, + site_url = R.site_url, + doi = {"doi": R.doi}, + mp_id = next((id.value for id in R.identifiers if id.type == 'RN'), None), + keywords = R.keywords + ) + + return gotResponse + + def get_title(self): + formula = self.keywords[1] return "Materials Data on %s by Materials Project" % formula - @classmethod - def get_site_url(cls, mp_id): - return "https://materialsproject.org/materials/%s" % mp_id + def get_site_url(self): + return "https://materialsproject.org/materials/%s" % self.mp_id - @classmethod - def get_keywords(cls, material): - keywords = "; ".join( - ["crystal structure", material.pretty_formula, material.chemsys] - ) - return keywords + def get_keywords(self): + # keywords = "; ".join( + # ["crystal structure", material.pretty_formula, material.chemsys] + # ) + return self.keywords @classmethod def get_default_description(cls): @@ -113,11 +77,11 @@ def custom_to_dict(cls, elink_record) -> dict: return elink_record.dict(exclude={"osti_id", "doi"}) else: return elink_record.dict(exclude={"doi"}) - + class ElinkResponseStatusEnum(Enum): - SUCCESS = "SUCCESS" - FAILED = "FAILURE" + SUCCESS = "SUCCESS" + FAILED = "FAILURE" class ELinkPostResponseModel(BaseModel): @@ -142,178 +106,4 @@ def generate_doi_record(self): ) doi_collection_record.set_status(status=self.doi["@status"]) doi_collection_record.last_validated_on = datetime.now() - return doi_collection_record - - -class DOIRecordStatusEnum(str, Enum): - COMPLETED = "COMPLETED" - PENDING = "PENDING" - FAILURE = "FAILURE" - INIT = "INIT" - - -class DOIRecordModel(BaseModel): - material_id: str = Field(...) 
- doi: str = Field(default="") - bibtex: Optional[str] = None - status: DOIRecordStatusEnum - valid: bool = Field(False) - last_updated: datetime = Field( - default=datetime.now(), - title="DOI last updated time.", - description="Last updated is defined as either a Bibtex or status change.", - ) - created_at: datetime = Field( - default=datetime.now(), - title="DOI Created At", - description="creation time for this DOI record", - ) - last_validated_on: datetime = Field( - default=datetime.now(), - title="Date Last Validated", - description="Date that this data is last validated, " "not necessarily updated", - ) - elsevier_updated_on: datetime = Field( - default=datetime.now(), - title="Date Elsevier is updated", - description="If None, means never uploaded to elsevier", - ) - error: Optional[str] = Field( - default=None, description="None if no error, else error message" - ) - - class Config: - use_enum_values = True - - def set_status(self, status): - self.status = status - - def get_osti_id(self): - if self.doi is None or self.doi == "": - return "" - else: - return self.doi.split("/")[-1] - - def get_bibtex_abstract(self): - try: - if self.bibtex is None: - return "" - bib_db: bibtexparser.bibdatabase.BibDatabase = bibtexparser.loads( - self.bibtex - ) - if bib_db.entries: - return bib_db.entries[0]["abstractnote"] - except Exception as e: - print(e) - return "" - - -class OSTIDOIRecordModel(DOIRecordModel): - material_id: str = Field(...) - doi: str = Field(default="") - bibtex: Optional[str] = None - valid: bool = Field(False) - last_updated: datetime = Field( - default=datetime.now(), - title="DOI last updated time.", - description="Last updated is defined as either a Bibtex or status change.", - ) - - -class ElsevierPOSTContainerModel(BaseModel): - identifier: str = Field(default="", title="mp_id") - source: str = "MATERIALS_PROJECT" - date: str = datetime.now().date().isoformat().__str__() - title: str - description: str = "" - doi: str - authors: List[str] = ["Kristin Persson"] - url: str - type: str = "dataset" - dateAvailable: str = datetime.now().date().isoformat().__str__() - dateCreated: str = datetime.now().date().isoformat().__str__() - version: str = "1.0.0" - funding: str = "USDOE Office of Science (SC), Basic Energy Sciences (BES) (SC-22)" - language: str = "en" - method: str = "Materials Project" - accessRights: str = "Public" - contact: str = "Kristin Persson " - dataStandard: str = "https://materialsproject.org/citing" - howToCite: str = "https://materialsproject.org/citing" - subjectAreas: List[str] = ["36 MATERIALS SCIENCE"] - keywords: List[str] - institutions: List[str] = ["Lawrence Berkeley National Laboratory"] - institutionIds: List[str] = ["AC02-05CH11231; EDCBEE"] - spatialCoverage: List[str] = [] - temporalCoverage: List[str] = [] - references: List[str] = ["https://materialsproject.org/citing"] - relatedResources: List[str] = ["https://materialsproject.org/citing"] - location: str = "1 Cyclotron Rd, Berkeley, CA 94720" - childContainerIds: List[str] = [] - - @classmethod - def get_url(cls, mp_id): - return "https://materialsproject.org/materials/%s" % mp_id - - @classmethod - def get_keywords(cls, material: MaterialModel): - return ["crystal structure", material.pretty_formula, material.chemsys] - - @classmethod - def get_default_description(cls): - return ( - "Computed materials data using density " - "functional theory calculations. 
These calculations determine " - "the electronic structure of bulk materials by solving " - "approximations to the Schrodinger equation. For more " - "information, see https://materialsproject.org/docs/calculations" - ) - - @classmethod - def get_date_created(cls, material: MaterialModel) -> str: - return material.created_at.date().__str__() - - @classmethod - def get_date_available(cls, material: MaterialModel) -> str: - return material.created_at.date().__str__() - - @classmethod - def get_title(cls, material: MaterialModel) -> str: - return material.pretty_formula - - @classmethod - def from_material_model(cls, material: MaterialModel, doi: str, description: str): - model = ElsevierPOSTContainerModel( - identifier=material.task_id, - title=material.pretty_formula, - doi=doi, - url="https://materialsproject.org/materials/%s" % material.task_id, - keywords=["crystal structure", material.pretty_formula, material.chemsys], - date=datetime.now().date().__str__(), - dateCreated=material.created_at.date().__str__(), - dateAvailable=ElsevierPOSTContainerModel.get_date_available(material), - description=description, - ) - return model - - -class ExplorerGetJSONResponseModel(BaseModel): - osti_id: str - title: str - report_number: str - doi: str - product_type: str - language: str - country_publication: str - description: str - site_ownership_code: str - publication_date: str - entry_date: str - contributing_organizations: str - authors: List[str] - subjects: List[str] - contributing_org: str - doe_contract_number: str - sponsor_orgs: List[str] - research_orgs: List[str] - links: List[Dict[str, str]] + return doi_collection_record \ No newline at end of file diff --git a/legacy/mpcite/recordresponse_example.txt b/legacy/mpcite/recordresponse_example.txt new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_elink_api.py b/tests/test_elink_api.py new file mode 100644 index 0000000..d1ade7d --- /dev/null +++ b/tests/test_elink_api.py @@ -0,0 +1,59 @@ +import os +from dotenv import load_dotenv + +from elinkapi import Elink, Record, exceptions +import pytest +from mpcite.models import ELinkGetResponseModel, TestClass + +from pymongo import MongoClient + + +load_dotenv() + +atlas_user = os.environ.get("atlas_user") +atlas_password = os.environ.get("atlas_password") +atlas_host = os.environ.get("atlas_host") +mongo_uri = f"mongodb+srv://{atlas_user}:{atlas_password}@{atlas_host}/" + +api = Elink(token=os.environ.get("elink_api_key")) # target default is production E-link service. 
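+# NOTE: the statements below run at import time, so collecting this file with
+# pytest fires live calls against the production E-Link service, and the bare
+# `type(record)` expression below evaluates and discards its result (use
+# print(type(record)) to inspect it). A possible guard, assuming this module
+# is also meant to be run directly as a script:
+#
+# if __name__ == "__main__":
+#     record = api.get_single_record(1190959)
+#     print(type(record))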
+ +record = api.get_single_record(1190959) +type(record) + +ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record) + +print(ELinkGotRecordModel.get_title()) +print(ELinkGotRecordModel.get_site_url()) +print(ELinkGotRecordModel.get_keywords()) +print(ELinkGotRecordModel.get_default_description()) + + + +ELinkTestGetRecordModel = TestClass(**record.model_dump()) + +with MongoClient(mongo_uri) as client: + #get all material_ids and dois from doi collection + doi_collection = client["mp_core"]["dois"] + materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10)) + material_ids = [entry["material_id"] for entry in materials_to_update] + + # check # of material_ids from DOI collection vs amount in robocrys + + # get description for material_ids from robocrys collection + coll = client["mp_core_blue"]["robocrys"] + res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) + + # join on material_id + for doc in res: + mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update)) + doc["doi"] = mat["doi"] + + +# {"material_id": ..., "doi": ..., "description": ...} -> +# Record( +# template_fields ..., +# doi: ..., +# description: ..., +# fields_where_material_id_makes_sense: ..., +# ) + From 7aa34d29f7871eafdf62766f513ac400d4e8018d Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 26 Jun 2025 18:12:34 -0700 Subject: [PATCH 02/65] queried all desired data entries (stored as batched json files) on ELink found bug with rows greater than 100 on ElinkAPI query_records (144845 dois under 10.17188, 12 are not titled Materials Data On... (edge cases), 144833 Materials have DOIs) --- tests/manage_backfills.py | 49 ++++++++++++++++ tests/outputs.txt | 46 +++++++++++++++ tests/prod_to_review.py | 120 ++++++++++++++++++++++++++++++++++++++ tests/test_elink_api.py | 96 ++++++++++++++++++++++-------- 4 files changed, 288 insertions(+), 23 deletions(-) create mode 100644 tests/manage_backfills.py create mode 100644 tests/outputs.txt create mode 100644 tests/prod_to_review.py diff --git a/tests/manage_backfills.py b/tests/manage_backfills.py new file mode 100644 index 0000000..a835456 --- /dev/null +++ b/tests/manage_backfills.py @@ -0,0 +1,49 @@ +# This script will see how many documents in ELink, i.e. ones with a DOI, are not accounted for in the internal DOI collection. + +from elinkapi import Elink, Query, Record + +import os +from dotenv import load_dotenv + +load_dotenv() # depends on the root directory from which you run your python scripts. 
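+# A less fragile alternative (a sketch, assuming the .env file sits next to
+# this script): resolve the path relative to this file instead of the current
+# working directory.
+#
+# from pathlib import Path
+# load_dotenv(Path(__file__).resolve().parent / ".env")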
+ +api = Elink(token = os.environ.get("elink_api_PRODUCTION_key")) + + +query1 = api.query_records(rows=1000) + +materials_with_dois : list[Record] = [] + +for page in query1: + print(f"Now on Page: {page.title}") + print(f"Material_ID: {page.site_unique_id} and DOI: http://doi.org/{page.doi}") + + if page.site_unique_id.startswith("mp-"): + materials_with_dois.append(page) + + # for record in page.data: + # if record.site_unique_id.startswith("mp-"): + # materials_with_dois.append(record) + + + +# set_q1 = [page for page in query1] +# set_q2 = [page for page in query2] + +# set_diffq1q2 = set(set_q1) - set(set_q2) +# print (f"Difference matched {len(set)} records") + +# filtered = [ +# page for page in query1 +# if page.title.lower().startswith("materials data on") +# ] + +# print (f"Filtered Query1 has {len(filtered)} records") + +# paginate through ALL results +# for page in query1: +# print(page.title) +# print(f"Material_ID: {page.site_unique_id} and DOI: http://doi.org/{page.doi}") + +# for record in page.data: +# print (f"OSTI ID: {record.osti_id} Title: {record.title}") \ No newline at end of file diff --git a/tests/outputs.txt b/tests/outputs.txt new file mode 100644 index 0000000..8d188e7 --- /dev/null +++ b/tests/outputs.txt @@ -0,0 +1,46 @@ +(mpcite-env) C:\Users\ongha\OneDrive\Documents\GitHub\MPCite>C:/Users/ongha/anaconda3/envs/mpcite-env/python.exe c:/Users/ongha/OneDrive/Documents/GitHub/MPCite/tests/prod_to_review.py + +Query retrieved 144845 record(s) +Page finished. Now at 500 data entries. 0 edge cases found. +Page finished. Now at 1000 data entries. 0 edge cases found. +Page finished. Now at 1500 data entries. 0 edge cases found. +Page finished. Now at 2000 data entries. 0 edge cases found. +Page finished. Now at 2500 data entries. 0 edge cases found. +Page finished. Now at 3000 data entries. 0 edge cases found. +Page finished. Now at 3500 data entries. 0 edge cases found. +Page finished. Now at 4000 data entries. 0 edge cases found. +Page finished. Now at 4500 data entries. 0 edge cases found. +Page finished. Now at 5000 data entries. 0 edge cases found. +Page finished. Now at 5500 data entries. 0 edge cases found. +Page finished. Now at 6000 data entries. 0 edge cases found. +Page finished. Now at 6500 data entries. 0 edge cases found. +Page finished. Now at 7000 data entries. 0 edge cases found. +Page finished. Now at 7500 data entries. 0 edge cases found. +Page finished. Now at 8000 data entries. 0 edge cases found. +Page finished. Now at 8500 data entries. 0 edge cases found. +Page finished. Now at 9000 data entries. 0 edge cases found. +Page finished. Now at 9500 data entries. 0 edge cases found. +Page finished. Now at 10000 data entries. 0 edge cases found. +Page finished. Now at 10500 data entries. 0 edge cases found. +Page finished. Now at 11000 data entries. 0 edge cases found. +Page finished. Now at 11500 data entries. 0 edge cases found. +Page finished. Now at 12000 data entries. 0 edge cases found. +Page finished. Now at 12500 data entries. 0 edge cases found. +Page finished. Now at 13000 data entries. 0 edge cases found. +Page finished. Now at 13500 data entries. 0 edge cases found. +Page finished. Now at 14000 data entries. 0 edge cases found. +Page finished. Now at 14500 data entries. 0 edge cases found. 
+ +Traceback (most recent call last): + File "C:\Users\ongha\anaconda3\envs\mpcite-env\Lib\site-packages\elinkapi\query.py", line 95, in __next__ + record = self.data.pop() +IndexError: pop from empty list + +During handling of the above exception, another exception occurred: + +Traceback (most recent call last): + File "c:\Users\ongha\OneDrive\Documents\GitHub\MPCite\tests\prod_to_review.py", line 29, in + record = next(query) + File "C:\Users\ongha\anaconda3\envs\mpcite-env\Lib\site-packages\elinkapi\query.py", line 108, in __next__ + raise StopIteration +StopIteration \ No newline at end of file diff --git a/tests/prod_to_review.py b/tests/prod_to_review.py new file mode 100644 index 0000000..87e311d --- /dev/null +++ b/tests/prod_to_review.py @@ -0,0 +1,120 @@ +from elinkapi import Elink, Query, Record + +import os +from dotenv import load_dotenv + +import json + +load_dotenv() # depends on the root directory from which you run your python scripts. + +review_endpoint = "https://review.osti.gov/elink2api/" + +prod_api = Elink(token = os.environ.get("elink_api_PRODUCTION_key")) +review_api = Elink(token = os.environ.get("elink_review_api_token"), target=review_endpoint) + +print(prod_api.query_records()) + +rows_per_page = 100 + +# query production +query = prod_api.query_records(rows=rows_per_page) +print(f"Query retrieved {query.total_rows} record(s)") + +count_materials_data = 0 +count_MaterialsDataOn = 0 +cwd = os.getcwd() +page_number = 0 +page_json_list = [] + +for record in query: + # increment counter + count_materials_data = count_materials_data + 1 + print(f"On record #{count_materials_data}, next url is {query.next_url}, previous url is {query.previous_url}") + + # see if the record is a Materials Data on record + if record.title.startswith("Materials Data on"): + # increment the MaterialsDataOn counter + count_MaterialsDataOn = count_MaterialsDataOn + 1 + + # prepare the new record for the review environment, remove the OSTI ID, and add its model_dump to the list of json objects for the page. + new_record = record + new_record_dict = new_record.model_dump(exclude_none=True) + + new_record_osti_id = new_record_dict.pop("osti_id") # now new_record_dict does not have the osti_id key. + js = json.dumps(new_record_dict, default=str) # datetime objects are not JSON serializable, so we use default=str to convert them to strings. + + page_json_list.append(js) + + # TODO: take the new_record_dict and make it into a new post to the review environment and save the RecordResponse. + + else: + print(f"Found edge case: {record.title}") + + if count_materials_data % rows_per_page == 0: + # create/open, write, and close new json file + page_number = count_materials_data / rows_per_page + path = f'/json_pages/page_number_{page_number}' + fp = open(cwd+path, 'a') + + for js in page_json_list: + fp.write(js) + fp.write("\n") + + fp.close() + page_json_list = [] + + print(f"Page {page_number} finished. Now at {count_materials_data} data entries. 
{count_materials_data - count_MaterialsDataOn} edge cases found.") + +# print remainder of records if not a full page after for loop exits +page_number = page_number + 1 +path = f'/json_pages/page_number_{page_number}' +fp = open(cwd+path, 'a') +for js in page_json_list: + fp.write(js) + fp.write("\n") +fp.close() + +# # if contains materials data on, then add to batch +# for count_materials_data < query.total_rows: + +# # print(f"The length of the query is now {len(query.data)}") +# record = next(query) +# count_materials_data = count_materials_data + 1 + +# if record.title.startswith("Materials Data on"): +# count_MaterialsDataOn = count_MaterialsDataOn + 1 + +# new_record = record +# new_record_dict = new_record.model_dump(exclude_none=True) + +# new_record_osti_id = new_record_dict.pop("osti_id") + +# page_dict[f"Entry OSTI_ID {new_record_osti_id}"] = new_record_dict + +# # TODO: take the new_record_dict and make it into a new post to the review environment and save the RecordResponse. + + + +# if count_materials_data % rows_per_page == 0: +# # if a page has been fully consummed, then print the new batched dictionary to a json file. + +# js = json.dumps(page_dict, default=str) + +# # open new json file if not exist it will create +# cwd = os.getcwd() +# path = f'/json_pages/page_number_{count_materials_data/rows_per_page}' +# fp = open(cwd+path, 'a') + +# # write to json file +# fp.write(js) + +# # close the connection to the file and empty the dict +# fp.close() +# page_dict = {} + +# print(f"Page {(count_materials_data / rows_per_page)} finished. Now at {count_materials_data} data entries. {count_materials_data - count_MaterialsDataOn} edge cases found.") + +# model_dump exclude_none=True, remove null keys +# pop osti_id --> save batch to json files +# make new record +# post to review_api diff --git a/tests/test_elink_api.py b/tests/test_elink_api.py index d1ade7d..80afba7 100644 --- a/tests/test_elink_api.py +++ b/tests/test_elink_api.py @@ -6,7 +6,7 @@ from mpcite.models import ELinkGetResponseModel, TestClass from pymongo import MongoClient - +import pymongo load_dotenv() @@ -15,38 +15,41 @@ atlas_host = os.environ.get("atlas_host") mongo_uri = f"mongodb+srv://{atlas_user}:{atlas_password}@{atlas_host}/" -api = Elink(token=os.environ.get("elink_api_key")) # target default is production E-link service. +api = Elink(token=os.environ.get("elink_api_PRODUCTION_key")) # target default is production E-link service. 
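+# NOTE: this client talks to the production E-Link service. For test runs, the
+# review environment used in prod_to_review.py could be targeted instead (a
+# sketch, assuming a separate review token is present in the .env file):
+#
+# review_endpoint = "https://review.osti.gov/elink2api/"
+# api = Elink(token=os.environ.get("elink_review_api_token"), target=review_endpoint)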
-record = api.get_single_record(1190959) -type(record) +### Grabbing an existing record -ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record) +# record = api.get_single_record(mp-id) # test for silicon -print(ELinkGotRecordModel.get_title()) -print(ELinkGotRecordModel.get_site_url()) -print(ELinkGotRecordModel.get_keywords()) -print(ELinkGotRecordModel.get_default_description()) +# type(record) +# ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record) +# print(ELinkGotRecordModel.get_title()) +# print(ELinkGotRecordModel.get_site_url()) +# print(ELinkGotRecordModel.get_keywords()) +# print(ELinkGotRecordModel.get_default_description()) -ELinkTestGetRecordModel = TestClass(**record.model_dump()) +# ELinkTestGetRecordModel = TestClass(**record.model_dump()) -with MongoClient(mongo_uri) as client: - #get all material_ids and dois from doi collection - doi_collection = client["mp_core"]["dois"] - materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10)) - material_ids = [entry["material_id"] for entry in materials_to_update] +### Making a new record + +# with MongoClient(mongo_uri) as client: +# #get all material_ids and dois from doi collection +# doi_collection = client["mp_core"]["dois"] +# materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10)) +# material_ids = [entry["material_id"] for entry in materials_to_update] - # check # of material_ids from DOI collection vs amount in robocrys +# # check # of material_ids from DOI collection vs amount in robocrys - # get description for material_ids from robocrys collection - coll = client["mp_core_blue"]["robocrys"] - res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) +# # get description for material_ids from robocrys collection +# coll = client["mp_core_blue"]["robocrys"] +# res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) - # join on material_id - for doc in res: - mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update)) - doc["doi"] = mat["doi"] +# # join on material_id +# for doc in res: +# mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update)) +# doc["doi"] = mat["doi"] # {"material_id": ..., "doi": ..., "description": ...} -> @@ -57,3 +60,50 @@ # fields_where_material_id_makes_sense: ..., # ) +# with the client open +with MongoClient(mongo_uri) as client: + # get all dois from the collection + doi_collection = client["mp_core"]["dois"] + materials_to_update = list(doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2)) + + # from the doi collection, grab the material_id and doi of each material + material_ids = [entry["material_id"] for entry in materials_to_update] + + # additionally, gain the osti id from the doi + osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update] + + # additionally, grab the description of each material from the robocrys + coll = client["mp_core_blue"]["robocrys"] # grabs robocrys collection from active database + res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) # grabs the material id and description of entries in the collection + descriptions = [entry["description"] for entry in res] + + # for each material (and its material_id, doi, and osti_id) + for i in range(len(materials_to_update)): + 
internal_material_id = material_ids[i] + internal_osti_id = osti_ids[i] + internal_description = descriptions[i] + + # get_single_record(osti_id) + record = api.get_single_record(internal_osti_id) + + print(f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************") + print(record) + + if internal_material_id == record.site_unique_id: + # update description + record.description = "testTESTtestTESTtest" + + print(f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print(record) + + # # post updated record + # try: + # saved_record = api.post_new_record(record, "save") + # except exceptions.BadRequestException as ve: + # ... + # # ve.message = "Site Code AAAA is not valid." + # # ve.errors provides more details: + # # [{"status":"400", "detail":"Site Code AAAA is not valid.", "source":{"pointer":"site_ownership_code"}}] + + + From 6054dbd904e2e73263636584946877cdbe7126bd Mon Sep 17 00:00:00 2001 From: HugoOnghai <99376417+HugoOnghai@users.noreply.github.com> Date: Fri, 11 Jul 2025 17:01:29 -0700 Subject: [PATCH 03/65] Merged upstream (#1) * move old code to 'legacy' * setup project using uv * add license * testing skeleton * gh actions skeleton * remove old reqs file to prevent dependabot alerts --------- Co-authored-by: Tyler Mathis <35553152+tsmathis@users.noreply.github.com> --- .github/workflows/lint.yml | 4 ++ .github/workflows/release.yml | 8 +++ .github/workflows/testing.yml | 5 ++ legacy/mpcite/models.py | 109 -------------------------------- src/mp_cite/core.py | 89 +++++++++++++++++++++++--- src/mp_cite/models.py | 112 +++++++++++++++++++++++++++++++++ src/mp_cite/send_collection.py | 79 +++++++++++++++++++++++ uv.lock | 90 +++++++++++++------------- 8 files changed, 334 insertions(+), 162 deletions(-) delete mode 100644 legacy/mpcite/models.py create mode 100644 src/mp_cite/send_collection.py diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 1a76e83..c854112 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -5,6 +5,7 @@ on: branches: [master] pull_request: branches: [master] +<<<<<<< HEAD workflow_dispatch: jobs: @@ -21,3 +22,6 @@ jobs: - name: Analyzing the code with ruff run: uvx ruff check --output-format=github +======= +# TODO: setup linting with uv/ruff +>>>>>>> 52382ff (Merged upstream (#1)) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e5d2a28..6f3c42d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,6 +1,7 @@ name: release on: +<<<<<<< HEAD release: types: [published] @@ -70,3 +71,10 @@ jobs: - name: Build and Deploy! 
run: uvx mkdocs gh-deploy +======= + push: + branches: [master] + pull_request: + branches: [master] +# TODO: setup release to pypi +>>>>>>> 52382ff (Merged upstream (#1)) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 67e0f21..9f71a9e 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -17,6 +17,11 @@ jobs: runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 +<<<<<<< HEAD +======= + with: + fetch-depth: 0 +>>>>>>> 52382ff (Merged upstream (#1)) - name: Install uv uses: astral-sh/setup-uv@v6 diff --git a/legacy/mpcite/models.py b/legacy/mpcite/models.py deleted file mode 100644 index e4d055f..0000000 --- a/legacy/mpcite/models.py +++ /dev/null @@ -1,109 +0,0 @@ -from pydantic import BaseModel, Field, ConfigDict -from typing import List, Dict, Optional -import datetime -from enum import Enum -import bibtexparser -from elinkapi import Elink, Record -from elinkapi.record import RecordResponse, AccessLimitation, JournalType -from elinkapi.geolocation import Geolocation -from elinkapi.identifier import Identifier -from elinkapi.related_identifier import RelatedIdentifier -from elinkapi.person import Person -from elinkapi.organization import Organization - -class TestClass(RecordResponse): - ... - # stuff - -class ELinkGetResponseModel(BaseModel): - osti_id: Optional[int] = Field(...) - dataset_type: str = Field(default="SM") - title: str = Field(...) - persons: List[Person] - contributors: List[Dict[str, str]] = Field( - default=[{"first_name": "Materials", "last_name": "Project"}], - description="List of Dict of first name, last name mapping", - ) # no contributor - publication_date: datetime.date - site_url: str = Field(...) - doi: dict = Field( - {}, title="DOI info", description="Mainly used during GET request" - ) - mp_id: str | None = None - keywords: List[str] = None - - @classmethod - def from_elinkapi_record(cls, R): - gotResponse = ELinkGetResponseModel( - osti_id = R.osti_id, - title = R.title, - persons = R.persons, - # assume default contributors for now, creators vs contributors? - publication_date = R.publication_date, - site_url = R.site_url, - doi = {"doi": R.doi}, - mp_id = next((id.value for id in R.identifiers if id.type == 'RN'), None), - keywords = R.keywords - ) - - return gotResponse - - def get_title(self): - formula = self.keywords[1] - return "Materials Data on %s by Materials Project" % formula - - def get_site_url(self): - return "https://materialsproject.org/materials/%s" % self.mp_id - - def get_keywords(self): - # keywords = "; ".join( - # ["crystal structure", material.pretty_formula, material.chemsys] - # ) - return self.keywords - - @classmethod - def get_default_description(cls): - return ( - "Computed materials data using density " - "functional theory calculations. These calculations determine " - "the electronic structure of bulk materials by solving " - "approximations to the Schrodinger equation. 
For more " - "information, see https://materialsproject.org/docs/calculations" - ) - - @classmethod - def custom_to_dict(cls, elink_record) -> dict: - if elink_record.osti_id is None or elink_record.osti_id == "": - return elink_record.dict(exclude={"osti_id", "doi"}) - else: - return elink_record.dict(exclude={"doi"}) - - -class ElinkResponseStatusEnum(Enum): - SUCCESS = "SUCCESS" - FAILED = "FAILURE" - - -class ELinkPostResponseModel(BaseModel): - osti_id: str - accession_num: str - product_nos: str - title: str - contract_nos: str - other_identifying_nos: Optional[str] - doi: Dict[str, str] - status: ElinkResponseStatusEnum - status_message: Optional[str] - - def generate_doi_record(self): - doi_collection_record = DOIRecordModel( - material_id=self.accession_num, - doi=self.doi["#text"], - status=self.doi["@status"], - bibtex=None, - valid=True, - last_validated_on=datetime.now(), - ) - doi_collection_record.set_status(status=self.doi["@status"]) - doi_collection_record.last_validated_on = datetime.now() - return doi_collection_record \ No newline at end of file diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index 0e4d698..bb78102 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -1,20 +1,13 @@ -from typing import TypeAlias - from elinkapi import Elink from elinkapi.record import RecordResponse from pymongo import MongoClient - import requests from elinkapi.utils import Validation - - from mp_cite.doi_builder import MinimumDARecord - -from typing import Literal +from typing import Literal, TypeAlias OstiID: TypeAlias = int - def find_out_of_date_doi_entries( rc_client: MongoClient, doi_client: MongoClient, @@ -23,6 +16,7 @@ def find_out_of_date_doi_entries( doi_db: str, doi_collection: str, ) -> list[OstiID]: +<<<<<<< HEAD """ find_out_of_date_doi_entries queries MP's mongo collections to find all robocrys documents that were updated less recently than the latest doi document @@ -35,6 +29,8 @@ def find_out_of_date_doi_entries( returns a list containing all OSTI IDs associated with out-of-date doi entries. 
""" +======= +>>>>>>> 5fa46e4 (Merged upstream (#1)) robocrys = rc_client[robocrys_db][robocrys_collection] dois = doi_client[doi_db][doi_collection] @@ -69,6 +65,7 @@ def find_out_of_date_doi_entries( def update_existing_osti_record( +<<<<<<< HEAD elinkapi: Elink, osti_id: OstiID, new_values: dict ) -> RecordResponse: """ @@ -100,10 +97,33 @@ def update_existing_osti_record( return elinkapi.update_record( osti_id, record_on_elink, state="save" ) # user should use update_state_of_osti_record to submit instead +======= + elinkapi: Elink, + osti_id: OstiID, + new_values: dict +) -> RecordResponse: + record_on_elink = elinkapi.get_single_record(osti_id) + + for keyword in new_values.keys(): + try: + setattr(record_on_elink, keyword, new_values[keyword]) + except ValueError: + print("Extraneous keywords found in the dictionary that do not correspond to attributes in the ELink API's record class.") + + # assume the use with fix the sponsor identifier bug before calling the update function + # # fix the issue with the sponsor organization's identifiers + # for entry in record_on_elink.organizations: + # if entry.type == "SPONSOR": + # entry.identifiers = [{"type": 'CN_DOE', "value": 'AC02-05CH11231'}] + # break + + return elinkapi.update_record(osti_id, record_on_elink, state="save") # user should use update_state_of_osti_record to submit instead +>>>>>>> 5fa46e4 (Merged upstream (#1)) def submit_new_osti_record( elinkapi: Elink, +<<<<<<< HEAD new_values: dict, state="submit", ) -> RecordResponse: @@ -123,12 +143,20 @@ def submit_new_osti_record( new_record = MinimumDARecord( **new_values ) # record is an instance of the MinimumDARecord model which gives default values to all necessary fields (EXCEPT Title) +======= + new_record: Record, + state = "submit", # assuming there is no need to both with saving. just send new record to osti when its ready for submission. 
also assume bug with DOE contract number identifier in sponsor organization is accounted for +) -> RecordResponse: + # template for all repeated stuff + # only submit +>>>>>>> 5fa46e4 (Merged upstream (#1)) record_response = elinkapi.post_new_record(new_record, state) return record_response def update_state_of_osti_record( +<<<<<<< HEAD elinkapi: Elink, osti_id: OstiID, new_state: Literal["save", "submit"] ) -> RecordResponse: """ @@ -160,3 +188,48 @@ def delete_osti_record(elinkapi: Elink, osti_id: OstiID, reason: str) -> bool: ) Validation.handle_response(response) return response.status_code == 204 # True if deleted successfully +======= + elinkapi: Elink, + osti_id: OstiID, + new_state = "submit" +) -> RecordResponse: + record = elinkapi.get_single_record(osti_id) + + # assuming that the user will handle the sponsor identifier bug before calling this function + # # fix the issue with the sponsor organization's identifiers + # for entry in record.organizations: + # if entry.type == "SPONSOR": + # entry.identifiers = [{"type": 'CN_DOE', "value": 'AC02-05CH11231'}] + # break + + return elinkapi.update_record(osti_id, record, new_state) + + +def delete_osti_record( + elinkapi: Elink, + osti_id: OstiID, + reason: str +) -> RecordResponse: + """Delete a record by its OSTI ID.""" + response = requests.delete(f"{elinkapi.target}records/{osti_id}?reason={reason}", headers={"Authorization": f"Bearer {elinkapi.token}"}) + Validation.handle_response(response) + return response.status_code == 204 # True if deleted successfully + +def emptyReviewAPI(reason, review_api): + allDeleted = True + for record in review_api.query_records(): + delete_osti_record(review_api, record.osti_id, reason) + +def make_minimum_record_to_fully_release( + title, # required to make record + product_type = "DA", # required to make record + organizations = [Organization(type='RESEARCHING', name='LBNL Materials Project (LBNL-MP)'), + Organization(type='SPONSOR', name='TEST SPONSOR ORG', identifiers=[{"type": 'CN_DOE', "value": 'AC02-05CH11231'}])], # sponsor org is necessary for submission + persons = [Person(type='AUTHOR', last_name='Perrson')], + site_ownership_code = "LBNL-MP", + access_limitations = ['UNL'], + publication_date = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0), # what should this be? + site_url = "https://next-gen.materialsproject.org/materials" +) -> Record: + return Record(product_type, title, persons, site_ownership_code, access_limitations, publication_date, site_url) +>>>>>>> 5fa46e4 (Merged upstream (#1)) diff --git a/src/mp_cite/models.py b/src/mp_cite/models.py index 8bbaf74..71b4c9a 100644 --- a/src/mp_cite/models.py +++ b/src/mp_cite/models.py @@ -1,3 +1,4 @@ +<<<<<<< HEAD from pydantic import BaseModel, Field, model_validator from datetime import datetime @@ -82,3 +83,114 @@ class MinimumDARecord(Record): default_factory=lambda: datetime.now(tz=pytz.UTC) ) site_url: str = Field(default="https://next-gen.materialsproject.org/materials") +======= +from pydantic import BaseModel, Field, ConfigDict +from typing import List, Dict, Optional +import datetime +from enum import Enum +import bibtexparser +from elinkapi import Elink, Record +from elinkapi.record import RecordResponse, AccessLimitation, JournalType +from elinkapi.geolocation import Geolocation +from elinkapi.identifier import Identifier +from elinkapi.related_identifier import RelatedIdentifier +from elinkapi.person import Person +from elinkapi.organization import Organization + +class TestClass(RecordResponse): + ... 
+ # stuff + +class ELinkGetResponseModel(BaseModel): + osti_id: Optional[int] = Field(...) + dataset_type: str = Field(default="SM") + title: str = Field(...) + persons: List[Person] + contributors: List[Dict[str, str]] = Field( + default=[{"first_name": "Materials", "last_name": "Project"}], + description="List of Dict of first name, last name mapping", + ) # no contributor + publication_date: datetime.date + site_url: str = Field(...) + doi: dict = Field( + {}, title="DOI info", description="Mainly used during GET request" + ) + mp_id: str | None = None + keywords: List[str] = None + + @classmethod + def from_elinkapi_record(cls, R): + gotResponse = ELinkGetResponseModel( + osti_id = R.osti_id, + title = R.title, + persons = R.persons, + # assume default contributors for now, creators vs contributors? + publication_date = R.publication_date, + site_url = R.site_url, + doi = {"doi": R.doi}, + mp_id = next((id.value for id in R.identifiers if id.type == 'RN'), None), + keywords = R.keywords + ) + + return gotResponse + + def get_title(self): + formula = self.keywords[1] + return "Materials Data on %s by Materials Project" % formula + + def get_site_url(self): + return "https://materialsproject.org/materials/%s" % self.mp_id + + def get_keywords(self): + # keywords = "; ".join( + # ["crystal structure", material.pretty_formula, material.chemsys] + # ) + return self.keywords + + @classmethod + def get_default_description(cls): + return ( + "Computed materials data using density " + "functional theory calculations. These calculations determine " + "the electronic structure of bulk materials by solving " + "approximations to the Schrodinger equation. For more " + "information, see https://materialsproject.org/docs/calculations" + ) + + @classmethod + def custom_to_dict(cls, elink_record) -> dict: + if elink_record.osti_id is None or elink_record.osti_id == "": + return elink_record.dict(exclude={"osti_id", "doi"}) + else: + return elink_record.dict(exclude={"doi"}) + + +class ElinkResponseStatusEnum(Enum): + SUCCESS = "SUCCESS" + FAILED = "FAILURE" + + +class ELinkPostResponseModel(BaseModel): + osti_id: str + accession_num: str + product_nos: str + title: str + contract_nos: str + other_identifying_nos: Optional[str] + doi: Dict[str, str] + status: ElinkResponseStatusEnum + status_message: Optional[str] + + def generate_doi_record(self): + doi_collection_record = DOIRecordModel( + material_id=self.accession_num, + doi=self.doi["#text"], + status=self.doi["@status"], + bibtex=None, + valid=True, + last_validated_on=datetime.now(), + ) + doi_collection_record.set_status(status=self.doi["@status"]) + doi_collection_record.last_validated_on = datetime.now() + return doi_collection_record +>>>>>>> 5fa46e4 (Merged upstream (#1)) diff --git a/src/mp_cite/send_collection.py b/src/mp_cite/send_collection.py new file mode 100644 index 0000000..0ce65a3 --- /dev/null +++ b/src/mp_cite/send_collection.py @@ -0,0 +1,79 @@ +from pathlib import Path +from xml.dom.minidom import parseString +from dicttoxml import dicttoxml +from mpcite.doi_builder import DOIBuilder +import json +from monty.json import MontyDecoder +from pydantic import BaseModel, Field +from typing import List + +default_description = ( + "Computed materials data using density functional theory calculations. These " + "calculations determine the electronic structure of bulk materials by solving " + "approximations to the Schrodinger equation. 
For more information, " + "see https://materialsproject.org/docs/calculations" +) + + +class CollectionsModel(BaseModel): + title: str = Field(default="Sample Title") + product_type: str = Field(default="DC") + relidentifiersblock: List[List[str]] = Field() + contributors: List[dict] + description: str = Field(default=default_description) + site_url: str = Field(default="https://materialsproject.org/") + + +config_file = Path("/Users/michaelwu/Desktop/projects/MPCite/files/config_prod.json") + +bld: DOIBuilder = json.load(config_file.open("r"), cls=MontyDecoder) +bld.config_file_path = config_file.as_posix() + +records = [ + CollectionsModel( + relidentifiersblock=[["mp-1", "mp-2", "mp-1"]], + contributors=[ + { + "first_name": "Michael", + "last_name": "Wu", + "email": "wuxiaohua1011@berkeley.edu", + } + ], + ).dict(), + CollectionsModel( + relidentifiersblock=[["mp-21"], ["mp-22"]], + contributors=[ + { + "first_name": "Michael", + "last_name": "Wu", + "email": "wuxiaohua1011@berkeley.edu", + } + ], + ).dict(), +] + + +def my_item_func(x): + if x == "records": + return "record" + elif x == "contributors": + return "contributor" + elif x == "relidentifier_detail": + return "related_identifier" + elif x == "relidentifiersblock": + return "relidentifier_detail" + else: + return "item" + + +records_xml = parseString( + dicttoxml(records, custom_root="records", attr_type=False, item_func=my_item_func) +) + +for item in records_xml.getElementsByTagName("relidentifier_detail"): + item.setAttribute("type", "accession_num") + item.setAttribute("relationType", "Compiles") + +print(records_xml.toprettyxml()) +# response = bld.elink_adapter.post_collection(data=records_xml.toxml()) +# print(response) diff --git a/uv.lock b/uv.lock index b6a2a78..e6682a7 100644 --- a/uv.lock +++ b/uv.lock @@ -118,7 +118,7 @@ wheels = [ [[package]] name = "elinkapi" -version = "0.5.0" +version = "0.5.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, @@ -126,9 +126,9 @@ dependencies = [ { name = "requests-toolbelt" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/af/6b/1f146b90638a018eee47e77bc59345cb871b89592f5f33f74f94afb4f596/elinkapi-0.5.0.tar.gz", hash = "sha256:957a0430b0fd6112dcdbe22593c40f4dd9ce2543349fa173a8989d121b28a421", size = 51191, upload-time = "2025-07-30T14:40:41.032Z" } +sdist = { url = "https://files.pythonhosted.org/packages/77/11/aa584c66c16a417433a6ac51d232e4cf35a1b5c5a8a747193c73503c8b14/elinkapi-0.5.1.tar.gz", hash = "sha256:33e73648bcb5272e458215698219dcc1c09645f0726798883a2adcdc07f5e00e", size = 51606, upload-time = "2025-08-06T17:49:57.796Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/7e/bbd7b56119bc41535048474ffe20ab7378fd8eb2dcd2a701784fab73489f/elinkapi-0.5.0-py3-none-any.whl", hash = "sha256:d61e36bea06ca10a58c9272fe79991af63c78e3dba11837eefa71f21fe1e61f0", size = 37333, upload-time = "2025-07-30T14:40:39.843Z" }, + { url = "https://files.pythonhosted.org/packages/86/28/dec8dfc0a2ddd7ba16a90c29bb7c832f9323b5b2c6bb9699244601bdb289/elinkapi-0.5.1-py3-none-any.whl", hash = "sha256:0ab14ed05a5860480697dba860cb684b77cda042006212e597b5c5ec253df481", size = 37695, upload-time = "2025-08-06T17:49:56.434Z" }, ] [[package]] @@ -535,49 +535,49 @@ wheels = [ [[package]] name = "pymongo" -version = "4.13.2" +version = "4.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dnspython" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/4b/5a/d664298bf54762f0c89b8aa2c276868070e06afb853b4a8837de5741e5f9/pymongo-4.13.2.tar.gz", hash = "sha256:0f64c6469c2362962e6ce97258ae1391abba1566a953a492562d2924b44815c2", size = 2167844, upload-time = "2025-06-16T18:16:30.685Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/94/df/4c4ef17b48c70120f834ba7151860c300924915696c4a57170cb5b09787f/pymongo-4.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7af8c56d0a7fcaf966d5292e951f308fb1f8bac080257349e14742725fd7990d", size = 857145, upload-time = "2025-06-16T18:14:56.516Z" }, - { url = "https://files.pythonhosted.org/packages/e7/41/480ca82b3b3320fc70fe699a01df28db15a4ea154c8759ab4a437a74c808/pymongo-4.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad24f5864706f052b05069a6bc59ff875026e28709548131448fe1e40fc5d80f", size = 857437, upload-time = "2025-06-16T18:14:58.572Z" }, - { url = "https://files.pythonhosted.org/packages/50/d4/eb74e98ea980a5e1ec4f06f383ec6c52ab02076802de24268f477ef616d2/pymongo-4.13.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a10069454195d1d2dda98d681b1dbac9a425f4b0fe744aed5230c734021c1cb9", size = 1426516, upload-time = "2025-06-16T18:15:00.589Z" }, - { url = "https://files.pythonhosted.org/packages/aa/fe/c5960c0e6438bd489367261e5ef1a5db01e34349f0dbf7529fb938d3d2ef/pymongo-4.13.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e20862b81e3863bcd72334e3577a3107604553b614a8d25ee1bb2caaea4eb90", size = 1477477, upload-time = "2025-06-16T18:15:02.283Z" }, - { url = "https://files.pythonhosted.org/packages/f6/9f/ef4395175fc97876978736c8493d8ffa4d13aa7a4e12269a2cb0d52a1246/pymongo-4.13.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b4d5794ca408317c985d7acfb346a60f96f85a7c221d512ff0ecb3cce9d6110", size = 1451921, upload-time = "2025-06-16T18:15:04.35Z" }, - { url = "https://files.pythonhosted.org/packages/2a/b9/397cb2a3ec03f880e882102eddcb46c3d516c6cf47a05f44db48067924d9/pymongo-4.13.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8e0420fb4901006ae7893e76108c2a36a343b4f8922466d51c45e9e2ceb717", size = 1431045, upload-time = "2025-06-16T18:15:06.392Z" }, - { url = "https://files.pythonhosted.org/packages/f5/0d/e150a414e5cb07f2fefca817fa071a6da8d96308469a85a777244c8c4337/pymongo-4.13.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:239b5f83b83008471d54095e145d4c010f534af99e87cc8877fc6827736451a0", size = 1399697, upload-time = "2025-06-16T18:15:08.975Z" }, - { url = "https://files.pythonhosted.org/packages/b8/29/5190eafb994721c30a38a8a62df225c47a9da364ab5c8cffe90aabf6a54e/pymongo-4.13.2-cp311-cp311-win32.whl", hash = "sha256:6bceb524110c32319eb7119422e400dbcafc5b21bcc430d2049a894f69b604e5", size = 836261, upload-time = "2025-06-16T18:15:10.459Z" }, - { url = "https://files.pythonhosted.org/packages/d3/da/30bdcc83b23fc4f2996b39b41b2ff0ff2184230a78617c7b8636aac4d81d/pymongo-4.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:ab87484c97ae837b0a7bbdaa978fa932fbb6acada3f42c3b2bee99121a594715", size = 851451, upload-time = "2025-06-16T18:15:12.181Z" }, - { url = "https://files.pythonhosted.org/packages/03/e0/0e187750e23eed4227282fcf568fdb61f2b53bbcf8cbe3a71dde2a860d12/pymongo-4.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ec89516622dfc8b0fdff499612c0bd235aa45eeb176c9e311bcc0af44bf952b6", size = 912004, upload-time = "2025-06-16T18:15:14.299Z" }, - { url = 
"https://files.pythonhosted.org/packages/57/c2/9b79795382daaf41e5f7379bffdef1880d68160adea352b796d6948cb5be/pymongo-4.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f30eab4d4326df54fee54f31f93e532dc2918962f733ee8e115b33e6fe151d92", size = 911698, upload-time = "2025-06-16T18:15:16.334Z" }, - { url = "https://files.pythonhosted.org/packages/6f/e4/f04dc9ed5d1d9dbc539dc2d8758dd359c5373b0e06fcf25418b2c366737c/pymongo-4.13.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cce9428d12ba396ea245fc4c51f20228cead01119fcc959e1c80791ea45f820", size = 1690357, upload-time = "2025-06-16T18:15:18.358Z" }, - { url = "https://files.pythonhosted.org/packages/bb/de/41478a7d527d38f1b98b084f4a78bbb805439a6ebd8689fbbee0a3dfacba/pymongo-4.13.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac9241b727a69c39117c12ac1e52d817ea472260dadc66262c3fdca0bab0709b", size = 1754593, upload-time = "2025-06-16T18:15:20.096Z" }, - { url = "https://files.pythonhosted.org/packages/df/d9/8fa2eb110291e154f4312779b1a5b815090b8b05a59ecb4f4a32427db1df/pymongo-4.13.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3efc4c515b371a9fa1d198b6e03340985bfe1a55ae2d2b599a714934e7bc61ab", size = 1723637, upload-time = "2025-06-16T18:15:22.048Z" }, - { url = "https://files.pythonhosted.org/packages/27/7b/9863fa60a4a51ea09f5e3cd6ceb231af804e723671230f2daf3bd1b59c2b/pymongo-4.13.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f57a664aa74610eb7a52fa93f2cf794a1491f4f76098343485dd7da5b3bcff06", size = 1693613, upload-time = "2025-06-16T18:15:24.866Z" }, - { url = "https://files.pythonhosted.org/packages/9b/89/a42efa07820a59089836f409a63c96e7a74e33313e50dc39c554db99ac42/pymongo-4.13.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dcb0b8cdd499636017a53f63ef64cf9b6bd3fd9355796c5a1d228e4be4a4c94", size = 1652745, upload-time = "2025-06-16T18:15:27.078Z" }, - { url = "https://files.pythonhosted.org/packages/6a/cf/2c77d1acda61d281edd3e3f00d5017d3fac0c29042c769efd3b8018cb469/pymongo-4.13.2-cp312-cp312-win32.whl", hash = "sha256:bf43ae07804d7762b509f68e5ec73450bb8824e960b03b861143ce588b41f467", size = 883232, upload-time = "2025-06-16T18:15:29.169Z" }, - { url = "https://files.pythonhosted.org/packages/d2/4f/727f59156e3798850c3c2901f106804053cb0e057ed1bd9883f5fa5aa8fa/pymongo-4.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:812a473d584bcb02ab819d379cd5e752995026a2bb0d7713e78462b6650d3f3a", size = 903304, upload-time = "2025-06-16T18:15:31.346Z" }, - { url = "https://files.pythonhosted.org/packages/e0/95/b44b8e24b161afe7b244f6d43c09a7a1f93308cad04198de1c14c67b24ce/pymongo-4.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d6044ca0eb74d97f7d3415264de86a50a401b7b0b136d30705f022f9163c3124", size = 966232, upload-time = "2025-06-16T18:15:33.057Z" }, - { url = "https://files.pythonhosted.org/packages/6d/fc/d4d59799a52033acb187f7bd1f09bc75bebb9fd12cef4ba2964d235ad3f9/pymongo-4.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dd326bcb92d28d28a3e7ef0121602bad78691b6d4d1f44b018a4616122f1ba8b", size = 965935, upload-time = "2025-06-16T18:15:34.826Z" }, - { url = "https://files.pythonhosted.org/packages/07/a8/67502899d89b317ea9952e4769bc193ca15efee561b24b38a86c59edde6f/pymongo-4.13.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfb0c21bdd58e58625c9cd8de13e859630c29c9537944ec0a14574fdf88c2ac4", size = 1954070, upload-time = 
"2025-06-16T18:15:36.576Z" }, - { url = "https://files.pythonhosted.org/packages/da/3b/0dac5d81d1af1b96b3200da7ccc52fc261a35efb7d2ac493252eb40a2b11/pymongo-4.13.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9c7d345d57f17b1361008aea78a37e8c139631a46aeb185dd2749850883c7ba", size = 2031424, upload-time = "2025-06-16T18:15:38.723Z" }, - { url = "https://files.pythonhosted.org/packages/31/ed/7a5af49a153224ca7e31e9915703e612ad9c45808cc39540e9dd1a2a7537/pymongo-4.13.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8860445a8da1b1545406fab189dc20319aff5ce28e65442b2b4a8f4228a88478", size = 1995339, upload-time = "2025-06-16T18:15:40.474Z" }, - { url = "https://files.pythonhosted.org/packages/f1/e9/9c72eceae8439c4f1bdebc4e6b290bf035e3f050a80eeb74abb5e12ef8e2/pymongo-4.13.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01c184b612f67d5a4c8f864ae7c40b6cc33c0e9bb05e39d08666f8831d120504", size = 1956066, upload-time = "2025-06-16T18:15:42.272Z" }, - { url = "https://files.pythonhosted.org/packages/ac/79/9b019c47923395d5fced03856996465fb9340854b0f5a2ddf16d47e2437c/pymongo-4.13.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ea8c62d5f3c6529407c12471385d9a05f9fb890ce68d64976340c85cd661b", size = 1905642, upload-time = "2025-06-16T18:15:43.978Z" }, - { url = "https://files.pythonhosted.org/packages/93/2f/ebf56c7fa9298fa2f9716e7b66cf62b29e7fc6e11774f3b87f55d214d466/pymongo-4.13.2-cp313-cp313-win32.whl", hash = "sha256:d13556e91c4a8cb07393b8c8be81e66a11ebc8335a40fa4af02f4d8d3b40c8a1", size = 930184, upload-time = "2025-06-16T18:15:46.899Z" }, - { url = "https://files.pythonhosted.org/packages/76/2f/49c35464cbd5d116d950ff5d24b4b20491aaae115d35d40b945c33b29250/pymongo-4.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:cfc69d7bc4d4d5872fd1e6de25e6a16e2372c7d5556b75c3b8e2204dce73e3fb", size = 955111, upload-time = "2025-06-16T18:15:48.85Z" }, - { url = "https://files.pythonhosted.org/packages/57/56/b17c8b5329b1842b7847cf0fa224ef0a272bf2e5126360f4da8065c855a1/pymongo-4.13.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a457d2ac34c05e9e8a6bb724115b093300bf270f0655fb897df8d8604b2e3700", size = 1022735, upload-time = "2025-06-16T18:15:50.672Z" }, - { url = "https://files.pythonhosted.org/packages/83/e6/66fec65a7919bf5f35be02e131b4dc4bf3152b5e8d78cd04b6d266a44514/pymongo-4.13.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:02f131a6e61559613b1171b53fbe21fed64e71b0cb4858c47fc9bc7c8e0e501c", size = 1022740, upload-time = "2025-06-16T18:15:53.218Z" }, - { url = "https://files.pythonhosted.org/packages/17/92/cda7383df0d5e71dc007f172c1ecae6313d64ea05d82bbba06df7f6b3e49/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c942d1c6334e894271489080404b1a2e3b8bd5de399f2a0c14a77d966be5bc9", size = 2282430, upload-time = "2025-06-16T18:15:55.356Z" }, - { url = "https://files.pythonhosted.org/packages/84/da/285e05eb1d617b30dc7a7a98ebeb264353a8903e0e816a4eec6487c81f18/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:850168d115680ab66a0931a6aa9dd98ed6aa5e9c3b9a6c12128049b9a5721bc5", size = 2369470, upload-time = "2025-06-16T18:15:57.5Z" }, - { url = "https://files.pythonhosted.org/packages/89/c0/c0d5eae236de9ca293497dc58fc1e4872382223c28ec223f76afc701392c/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:af7dfff90647ee77c53410f7fe8ca4fe343f8b768f40d2d0f71a5602f7b5a541", size = 2328857, upload-time = "2025-06-16T18:15:59.59Z" }, - { url = "https://files.pythonhosted.org/packages/2b/5a/d8639fba60def128ce9848b99c56c54c8a4d0cd60342054cd576f0bfdf26/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8057f9bc9c94a8fd54ee4f5e5106e445a8f406aff2df74746f21c8791ee2403", size = 2280053, upload-time = "2025-06-16T18:16:02.166Z" }, - { url = "https://files.pythonhosted.org/packages/a1/69/d56f0897cc4932a336820c5d2470ffed50be04c624b07d1ad6ea75aaa975/pymongo-4.13.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51040e1ba78d6671f8c65b29e2864483451e789ce93b1536de9cc4456ede87fa", size = 2219378, upload-time = "2025-06-16T18:16:04.108Z" }, - { url = "https://files.pythonhosted.org/packages/04/1e/427e7f99801ee318b6331062d682d3816d7e1d6b6013077636bd75d49c87/pymongo-4.13.2-cp313-cp313t-win32.whl", hash = "sha256:7ab86b98a18c8689514a9f8d0ec7d9ad23a949369b31c9a06ce4a45dcbffcc5e", size = 979460, upload-time = "2025-06-16T18:16:06.128Z" }, - { url = "https://files.pythonhosted.org/packages/b5/9c/00301a6df26f0f8d5c5955192892241e803742e7c3da8c2c222efabc0df6/pymongo-4.13.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c38168263ed94a250fc5cf9c6d33adea8ab11c9178994da1c3481c2a49d235f8", size = 1011057, upload-time = "2025-06-16T18:16:07.917Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/bf/1c/f148bb1747c48955dbeea34a53c6d60b858f902c61c62330d277ee806af7/pymongo-4.14.0.tar.gz", hash = "sha256:15674e3fddce78cf134fc4e55f90abf1608a48430130cd35efdf3802fd47a1d1", size = 2213509, upload-time = "2025-08-06T13:41:11.04Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/31/e67483f313f70d7440e820246ec500bf039a9c905c6d374cdd970ed9241d/pymongo-4.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88f9c415c59450c0ac4133aa4745459101619ca7997dc468209bf395563667d2", size = 859161, upload-time = "2025-08-06T13:39:49.139Z" }, + { url = "https://files.pythonhosted.org/packages/b5/9b/afc662756e32922207b99ffc0f3d01cee5a495af0078a675a4dfc901ef75/pymongo-4.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6513474970fdf3afd9dc9de9065a31a1efc8288ca9068510e5e973fa80200c8f", size = 859458, upload-time = "2025-08-06T13:39:50.469Z" }, + { url = "https://files.pythonhosted.org/packages/24/9b/078cc8fe51836f4ec1bc2d49e0cfccfc3b914991213cc50c028d5b268a44/pymongo-4.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d60c0e9b13603317062f316906fb5be4000f5b5fe288eb6e9df4ef8695863cd8", size = 1428520, upload-time = "2025-08-06T13:39:52.045Z" }, + { url = "https://files.pythonhosted.org/packages/d2/0e/89864b0ab731927bffb60485238914d9462adbc93061d0c38dd60deb346f/pymongo-4.14.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a203cb757f75804c43aec48f23cb138e890a24219716ce9958041dace39ba470", size = 1479481, upload-time = "2025-08-06T13:39:54.913Z" }, + { url = "https://files.pythonhosted.org/packages/3b/8d/1b8ee7f66804d6ea88f7bfc2cf7fce1bd3b2598cb9e003a4406eb10d7405/pymongo-4.14.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bdf65bf8167a92b70de44d28ed9df1c2dec83fe2a82e26c01fc89da8ca6bc34", size = 1453925, upload-time = "2025-08-06T13:39:56.234Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/0e/97a8e082bbb60b39276f891f74aedb2f5ee97bbdee88690ef313f341690a/pymongo-4.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1f79fdd99db135dbc1678477793764c156c11623d0d9dbe4c57767d081b79b8", size = 1433050, upload-time = "2025-08-06T13:39:57.556Z" }, + { url = "https://files.pythonhosted.org/packages/b0/88/6627ecc431fa63008715dd0927204691d356c996de04d1477fa2317ea706/pymongo-4.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05152a2ca55805c37f77ac473e51915f44bba9a6b32fed875fa9df61d81681ca", size = 1401702, upload-time = "2025-08-06T13:39:59.265Z" }, + { url = "https://files.pythonhosted.org/packages/20/9a/1d253195763c865336536e5f328a86691db5ee642714ea1f12d51491223e/pymongo-4.14.0-cp311-cp311-win32.whl", hash = "sha256:aa25505e36e32bef3fa135578461f24735e9d4b7b62e6aa21eb8f2d163cef86d", size = 838006, upload-time = "2025-08-06T13:40:00.936Z" }, + { url = "https://files.pythonhosted.org/packages/21/0d/9e5243870e2ff2d2c4552d32e22fd4d3079466e3350b91bc3d68b99d19d5/pymongo-4.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:c57aef3b48e8c7818689604ff24e54524e164056ec56ee5ea48384264360bf59", size = 852617, upload-time = "2025-08-06T13:40:02.374Z" }, + { url = "https://files.pythonhosted.org/packages/ca/fd/71936f5188d76e1e5d86749c39fb15f7aaf7fdc0a81d62ca084cad9ed740/pymongo-4.14.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:864f005459ef9b19c60cb96ca755124e0291d211000859f33c62a166f55eba27", size = 914026, upload-time = "2025-08-06T13:40:06.024Z" }, + { url = "https://files.pythonhosted.org/packages/bd/7e/821ec87233b0cdc0cb5b2f9845d7ff52e94e5a37cc05c6d59a3d6c5f6f98/pymongo-4.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:69956f971a6f8dafc62e5c83ac21ebf15d5757e13758284f12218ad7fbd3c0fe", size = 913721, upload-time = "2025-08-06T13:40:07.459Z" }, + { url = "https://files.pythonhosted.org/packages/82/67/174f6b92efe4da967e8fcbaa25e59c6cb06efd395cacc9558c1254565031/pymongo-4.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91200f47453a1cb97136629e28f4091a109756ec37067b622f90c4b626b4af8d", size = 1692360, upload-time = "2025-08-06T13:40:08.857Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b5/36e84df138cd4a8280334ed5e6e7f5fa52a9cbe933cd68d9e10c9bca6639/pymongo-4.14.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c32f5e23e9dd31e20800d993f3695b6627664dc7da30ac1669f9716833b33175", size = 1756598, upload-time = "2025-08-06T13:40:10.61Z" }, + { url = "https://files.pythonhosted.org/packages/ad/7f/c6964ce567a4cc6248d7f0959af9b7d9e51837a3ca3d54b15ade6eecf376/pymongo-4.14.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4407c1ab7514e08d4af0f58cf9d7eddc86e45e458fe46f99a72a5d18dbc71dc", size = 1725641, upload-time = "2025-08-06T13:40:12.07Z" }, + { url = "https://files.pythonhosted.org/packages/7d/5b/827dca0b1b53118a96749a63886c1bbc04bf56b68424038baf7dabc98380/pymongo-4.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ec6666e599ee10cc5cde0cc6a8519373384a14af3a310ede1bf177105f38fb0", size = 1695618, upload-time = "2025-08-06T13:40:13.845Z" }, + { url = "https://files.pythonhosted.org/packages/9e/45/f53f6531836f9da26b753c60e0d0a0c6f22ac023ba8ef1286addf56ce86f/pymongo-4.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a809a982a843bb561c7030059d54ea7f1dcc967cc72a45f1435695e2a2a515a5", size = 1654748, upload-time = "2025-08-06T13:40:15.401Z" }, + { url = "https://files.pythonhosted.org/packages/83/3b/4b33d36c00a0c44889322b8e9a0650aa5668bc531f6301f01ad7a242d120/pymongo-4.14.0-cp312-cp312-win32.whl", hash = "sha256:3866d031fcbe81d7677c078026e650aeef8915560ba758a28051debce38f6b77", size = 884844, upload-time = "2025-08-06T13:40:17.21Z" }, + { url = "https://files.pythonhosted.org/packages/eb/f6/68b52e16fb831c246171379ae2115cc8cb282f6b7b47fbe7fb8cc4b9df1f/pymongo-4.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:91b8de871a40225bbe4f92d6dc3f20c26bf838e49d3563592131401af0d665a6", size = 904277, upload-time = "2025-08-06T13:40:18.722Z" }, + { url = "https://files.pythonhosted.org/packages/97/99/d6c145e57387bfa2b6d90f4f5285f2b0903625733dcc6403aa9d7abeddbb/pymongo-4.14.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54576faf8e6fefe17886a201f9df61bdf728ccac9a7a0847095a0e8480cd6ec1", size = 968259, upload-time = "2025-08-06T13:40:20.551Z" }, + { url = "https://files.pythonhosted.org/packages/8d/36/5226af83554bbfa0d754aa1ab022af92f64a2376604d56c9a8c50e247b85/pymongo-4.14.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c64ef5e58adedecb853a680eb5d1aea50770197f212e202d6eb50c801797b576", size = 967959, upload-time = "2025-08-06T13:40:21.962Z" }, + { url = "https://files.pythonhosted.org/packages/6d/f5/4e627e3e5230e8c62c5fe218b5cb1347a1b01932fd6446c3f03e18ec29c5/pymongo-4.14.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f1c0cdddf9783065bf55d3fe025843c0974a828bafc9bb5514ae28dd2828a40", size = 1956074, upload-time = "2025-08-06T13:40:23.413Z" }, + { url = "https://files.pythonhosted.org/packages/16/05/989cfdc8536245a55a549f76ba20356822ebfce752e72a5164bd0795ace0/pymongo-4.14.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22805d4fa587b526ac3129ee634a8761abbeb76883045718438f5b8e72f91ce6", size = 2033427, upload-time = "2025-08-06T13:40:25.325Z" }, + { url = "https://files.pythonhosted.org/packages/93/a5/fafef0230fa6cc5d4bb5addcea77d8b71f4eca4d31a5a596d26398aaa45a/pymongo-4.14.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c3e6e669cf36b27694de2730f5d5b31ef492ffe99446563192c4e8ee84ca859", size = 1997344, upload-time = "2025-08-06T13:40:26.91Z" }, + { url = "https://files.pythonhosted.org/packages/9b/6c/1170d5c8e087832dba2b0930ec90bafde8a59efa37b521d9a5902ec9d282/pymongo-4.14.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9763a2477d5388df65ab6f591cf0cb7fd34f4a66f873c31e63288fd79887742c", size = 1958072, upload-time = "2025-08-06T13:40:28.865Z" }, + { url = "https://files.pythonhosted.org/packages/27/7b/bb07c7c4c102046ff92f3acd05d85870e06a08df2a0fcd2e87586f2516fe/pymongo-4.14.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d6f5aceab3528b8760942a57635599925f7fd743691f9d759a02124207dfd0", size = 1907647, upload-time = "2025-08-06T13:40:30.435Z" }, + { url = "https://files.pythonhosted.org/packages/21/e6/c0ee2163133f1a437cd3b0308521e24181b69b5510b9eabde9ee86999c12/pymongo-4.14.0-cp313-cp313-win32.whl", hash = "sha256:e283feafde118cbbb03adc036b882be042b0a2eca121ec5d6bbec3e12980e8fa", size = 931673, upload-time = "2025-08-06T13:40:31.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/1d/1692f0696d8e6bcb3e43469999eeb92d5f0acdb9a50aca2c869820321df9/pymongo-4.14.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:a29b62d421a512833e99d4781b64e695cfe23b4c4a9159ea83e56fc2660f2480", size = 955893, upload-time = "2025-08-06T13:40:33.461Z" }, + { url = "https://files.pythonhosted.org/packages/fd/19/f3ed531d7151dc2d1be27746c206e74403283873ec5d12170616982eccb0/pymongo-4.14.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67225d5fb1be2a34c6f73cde9d81a8041a095a94ed2433e2cf9e2f1657443def", size = 1024757, upload-time = "2025-08-06T13:40:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/36/f9/38782d41e16d11ba540ddfc618104e249a5b950a446b8a77a17f3416c6e6/pymongo-4.14.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:bbf0dbe8554978c1271bdc82366852245a03ab124a1387b6f3531f64adac3c39", size = 1024765, upload-time = "2025-08-06T13:40:36.797Z" }, + { url = "https://files.pythonhosted.org/packages/82/75/b385aa6ed09d077b47b00d4bc3c4b9bdac97b50fde3be7599ff9ceef8cbc/pymongo-4.14.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7f61561cbc7426ffc2d37b46e65ab5323fc366644f70c8e2240ed5452e2c402", size = 2284433, upload-time = "2025-08-06T13:40:38.35Z" }, + { url = "https://files.pythonhosted.org/packages/f2/29/d0dbf281e58e26dbeef4e972905b371c63d33c5aa8caa0d58140224bfee5/pymongo-4.14.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:502cd551679753fb7838221c3bbb963da4b4aa0576848192afb1f78128ff729a", size = 2371473, upload-time = "2025-08-06T13:40:39.852Z" }, + { url = "https://files.pythonhosted.org/packages/db/eb/089dfc96a29881ed17964705ea254da2f8b3aebf9754abd6aaa8125e1589/pymongo-4.14.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba6b59afef2b47c4859bf36115fa577330601b93e39d04f39fcc6103e801286", size = 2330862, upload-time = "2025-08-06T13:40:41.803Z" }, + { url = "https://files.pythonhosted.org/packages/35/07/b4f1215314e5f1114a899c33f17219b1f590502c736058c50571fa189ed1/pymongo-4.14.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c0fa103f978c15f7c2f0d7b2e010c24c327432a0310503bc0ec93c5f9be9e81", size = 2282058, upload-time = "2025-08-06T13:40:43.467Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a1/87340e5a38003ef3591fdfc4b911fb32531b0dbbed8ab2431006858590fe/pymongo-4.14.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fe93676afe37794f01b8df5cf16528dce4d7d174cdf51ea1586c234eb5263c2", size = 2221380, upload-time = "2025-08-06T13:40:45.256Z" }, + { url = "https://files.pythonhosted.org/packages/0a/68/3970d18b0b58c4681598bc1e233f33c15ff0c388422b17ddd195e214f35d/pymongo-4.14.0-cp313-cp313t-win32.whl", hash = "sha256:1462fc2bb39527f01eea5378172b66c45d62e22fa4be957afe2ec747c4d2ff51", size = 980892, upload-time = "2025-08-06T13:40:46.908Z" }, + { url = "https://files.pythonhosted.org/packages/2d/fa/68b1555e62ed3ee87f8a2de99d5fb840cf045748da4488870b4dced44a95/pymongo-4.14.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e506af9b25aac77cc5c5ea4a72f81764e4f5ea90ca799aac43d665ab269f291d", size = 1011181, upload-time = "2025-08-06T13:40:48.641Z" }, ] [[package]] @@ -748,16 +748,16 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.33.0" +version = "20.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/db/2e/8a70dcbe8bf15213a08f9b0325ede04faca5d362922ae0d62ef0fa4b069d/virtualenv-20.33.0.tar.gz", hash = 
"sha256:47e0c0d2ef1801fce721708ccdf2a28b9403fa2307c3268aebd03225976f61d2", size = 6082069, upload-time = "2025-08-03T08:09:19.014Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/60/4f20960df6c7b363a18a55ab034c8f2bcd5d9770d1f94f9370ec104c1855/virtualenv-20.33.1.tar.gz", hash = "sha256:1b44478d9e261b3fb8baa5e74a0ca3bc0e05f21aa36167bf9cbf850e542765b8", size = 6082160, upload-time = "2025-08-05T16:10:55.605Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/87/b22cf40cdf7e2b2bf83f38a94d2c90c5ad6c304896e5a12d0c08a602eb59/virtualenv-20.33.0-py3-none-any.whl", hash = "sha256:106b6baa8ab1b526d5a9b71165c85c456fbd49b16976c88e2bc9352ee3bc5d3f", size = 6060205, upload-time = "2025-08-03T08:09:16.674Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ff/ded57ac5ff40a09e6e198550bab075d780941e0b0f83cbeabd087c59383a/virtualenv-20.33.1-py3-none-any.whl", hash = "sha256:07c19bc66c11acab6a5958b815cbcee30891cd1c2ccf53785a28651a0d8d8a67", size = 6060362, upload-time = "2025-08-05T16:10:52.81Z" }, ] [[package]] From b596fe1508bd8e5327c9a132e04948735dc9b75b Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Mon, 14 Jul 2025 10:50:00 -0700 Subject: [PATCH 04/65] Added linting workflow, hopefully it works --- .github/workflows/lint.yml | 4 ---- pyproject.toml | 2 +- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c854112..1a76e83 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -5,7 +5,6 @@ on: branches: [master] pull_request: branches: [master] -<<<<<<< HEAD workflow_dispatch: jobs: @@ -22,6 +21,3 @@ jobs: - name: Analyzing the code with ruff run: uvx ruff check --output-format=github -======= -# TODO: setup linting with uv/ruff ->>>>>>> 52382ff (Merged upstream (#1)) diff --git a/pyproject.toml b/pyproject.toml index 52babb3..e93ed31 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ description = "Add your description here" readme = "README.md" authors = [ { name = "The Materials Project", email = "feedback@materialsproject.org" }, - { name = "Hugo Onghai", email = "" }, + { name = "Hugo Onghai", email = "hugoonghai@g.ucla.edu" }, { name = "Tyler Mathis", email = "35553152+tsmathis@users.noreply.github.com" }, ] maintainers = [ From 9a297de19ef2c011b7a31620e0ca25d574bc53eb Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Wed, 16 Jul 2025 10:37:37 -0700 Subject: [PATCH 05/65] New Branch for Linting Workflow --- .gitignore | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 8241d4c..d35bdfc 100644 --- a/.gitignore +++ b/.gitignore @@ -210,6 +210,15 @@ __marimo__/ # Streamlit .streamlit/secrets.toml +<<<<<<< HEAD json_pages/ notebooks/ -test_json_pages/ \ No newline at end of file +test_json_pages/ +======= +# json files for storing production records +*.json +.env +/json_pages +/notebooks +/test_json_pages +>>>>>>> b991f09 (New Branch for Linting Workflow) From 3388de033f4aa7cd6a79dec9a542942588b6ffa9 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 17 Jul 2025 15:31:57 -0700 Subject: [PATCH 06/65] Testing Linting workflow --- .github/workflows/lint.yml | 31 +++++++++++++++++++++++++++++++ src/mp_cite/core.py | 7 +++++-- 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 1a76e83..522a6bc 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,6 +7,7 @@ on: branches: [master] workflow_dispatch: +<<<<<<< HEAD 
jobs: linting: runs-on: ubuntu-latest @@ -21,3 +22,33 @@ jobs: - name: Analyzing the code with ruff run: uvx ruff check --output-format=github +======= +# TODO: setup linting with uv/ruff +# informed by testing.yml and https://medium.com/@sidharthvijayakumar7/automating-pylint-in-github-workflow-80c84b2ff243 and ruff documentation +jobs: + linting: + strategy: + matrix: + os: ["ubuntu-latest"] + python-version: ["3.11", "3.12", "3.13"] + + name: mp-cite (${{ matrix.os }}/py${{ matrix.python-version }}) + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + python-version: ${{ matrix.python-version }} + version: "latest" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install ruff + # Update output format to enable automatic inline annotations + - name: Analyzing the code with ruff + run: ruff check --output-format=github +>>>>>>> 7c6a8e7 (Testing Linting workflow) diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index bb78102..246fdcc 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -206,12 +206,15 @@ def delete_osti_record(elinkapi: Elink, osti_id: OstiID, reason: str) -> bool: def delete_osti_record( - elinkapi: Elink, + elinkapi_token: str, osti_id: OstiID, reason: str ) -> RecordResponse: + review_endpoint = "https://review.osti.gov/elink2api/" + review_api = Elink(token = elinkapi_token, target=review_endpoint) + """Delete a record by its OSTI ID.""" - response = requests.delete(f"{elinkapi.target}records/{osti_id}?reason={reason}", headers={"Authorization": f"Bearer {elinkapi.token}"}) + response = requests.delete(f"{review_api.target}records/{osti_id}?reason={reason}", headers={"Authorization": f"Bearer {review_api.token}"}) Validation.handle_response(response) return response.status_code == 204 # True if deleted successfully From bbba14b9340f9e3a391e71313f18355816f10c19 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 17 Jul 2025 15:37:26 -0700 Subject: [PATCH 07/65] Allowing Lint.YML to run on push to linting_workflow --- .github/workflows/lint.yml | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 522a6bc..1a76e83 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,7 +7,6 @@ on: branches: [master] workflow_dispatch: -<<<<<<< HEAD jobs: linting: runs-on: ubuntu-latest @@ -22,33 +21,3 @@ jobs: - name: Analyzing the code with ruff run: uvx ruff check --output-format=github -======= -# TODO: setup linting with uv/ruff -# informed by testing.yml and https://medium.com/@sidharthvijayakumar7/automating-pylint-in-github-workflow-80c84b2ff243 and ruff documentation -jobs: - linting: - strategy: - matrix: - os: ["ubuntu-latest"] - python-version: ["3.11", "3.12", "3.13"] - - name: mp-cite (${{ matrix.os }}/py${{ matrix.python-version }}) - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - python-version: ${{ matrix.python-version }} - version: "latest" - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install ruff - # Update output format to enable automatic inline annotations - - name: Analyzing the code with ruff - run: ruff check --output-format=github ->>>>>>> 7c6a8e7 (Testing Linting workflow) From d7a7e393655f82ab492346f31881c4be220d7c28 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 17 Jul 2025 16:25:37 -0700 
Subject: [PATCH 08/65] Testing pre-commit and updated lint.yml to disregard legacy files --- .gitignore | 4 +-- pyproject.toml | 41 ++++++++++++++++++++++ src/mp_cite/core.py | 83 --------------------------------------------- 3 files changed, 43 insertions(+), 85 deletions(-) diff --git a/.gitignore b/.gitignore index d35bdfc..6b641e9 100644 --- a/.gitignore +++ b/.gitignore @@ -183,9 +183,9 @@ cython_debug/ .abstra/ # Visual Studio Code -# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore +# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore -# and can be added to the global gitignore or merged into this file. However, if you prefer, +# and can be added to the global gitignore or merged into this file. However, if you prefer, # you could uncomment the following to ignore the entire vscode folder .vscode/ diff --git a/pyproject.toml b/pyproject.toml index e93ed31..9c723eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,15 +50,51 @@ Issues = "https://github.com/materialsproject/MPCite/issues" [tool.ruff] # Exclude a variety of commonly ignored directories. exclude = [ +<<<<<<< HEAD "legacy", "notebooks", "uv.lock" +======= + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", + "legacy" +>>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files) ] # Same as Black. line-length = 88 indent-width = 4 +<<<<<<< HEAD +======= +# Assume Python 3.9 +target-version = "py39" + +>>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files) [tool.ruff.lint] # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. # Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or @@ -70,8 +106,13 @@ ignore = [] fixable = ["ALL"] unfixable = [] +<<<<<<< HEAD # Allow NO unused variables to exist in the codebase. If underscore-prefixed unused variables are permissible, use this regex $^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" dummy-variable-rgx = "^$" +======= +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" +>>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files) [tool.ruff.format] # Like Black, use double quotes for strings. diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index 246fdcc..8f081b4 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -16,7 +16,6 @@ def find_out_of_date_doi_entries( doi_db: str, doi_collection: str, ) -> list[OstiID]: -<<<<<<< HEAD """ find_out_of_date_doi_entries queries MP's mongo collections to find all robocrys documents that were updated less recently than the latest doi document @@ -29,8 +28,6 @@ def find_out_of_date_doi_entries( returns a list containing all OSTI IDs associated with out-of-date doi entries. 
""" -======= ->>>>>>> 5fa46e4 (Merged upstream (#1)) robocrys = rc_client[robocrys_db][robocrys_collection] dois = doi_client[doi_db][doi_collection] @@ -65,7 +62,6 @@ def find_out_of_date_doi_entries( def update_existing_osti_record( -<<<<<<< HEAD elinkapi: Elink, osti_id: OstiID, new_values: dict ) -> RecordResponse: """ @@ -97,33 +93,10 @@ def update_existing_osti_record( return elinkapi.update_record( osti_id, record_on_elink, state="save" ) # user should use update_state_of_osti_record to submit instead -======= - elinkapi: Elink, - osti_id: OstiID, - new_values: dict -) -> RecordResponse: - record_on_elink = elinkapi.get_single_record(osti_id) - - for keyword in new_values.keys(): - try: - setattr(record_on_elink, keyword, new_values[keyword]) - except ValueError: - print("Extraneous keywords found in the dictionary that do not correspond to attributes in the ELink API's record class.") - - # assume the use with fix the sponsor identifier bug before calling the update function - # # fix the issue with the sponsor organization's identifiers - # for entry in record_on_elink.organizations: - # if entry.type == "SPONSOR": - # entry.identifiers = [{"type": 'CN_DOE', "value": 'AC02-05CH11231'}] - # break - - return elinkapi.update_record(osti_id, record_on_elink, state="save") # user should use update_state_of_osti_record to submit instead ->>>>>>> 5fa46e4 (Merged upstream (#1)) def submit_new_osti_record( elinkapi: Elink, -<<<<<<< HEAD new_values: dict, state="submit", ) -> RecordResponse: @@ -143,20 +116,12 @@ def submit_new_osti_record( new_record = MinimumDARecord( **new_values ) # record is an instance of the MinimumDARecord model which gives default values to all necessary fields (EXCEPT Title) -======= - new_record: Record, - state = "submit", # assuming there is no need to both with saving. just send new record to osti when its ready for submission. 
also assume bug with DOE contract number identifier in sponsor organization is accounted for -) -> RecordResponse: - # template for all repeated stuff - # only submit ->>>>>>> 5fa46e4 (Merged upstream (#1)) record_response = elinkapi.post_new_record(new_record, state) return record_response def update_state_of_osti_record( -<<<<<<< HEAD elinkapi: Elink, osti_id: OstiID, new_state: Literal["save", "submit"] ) -> RecordResponse: """ @@ -188,51 +153,3 @@ def delete_osti_record(elinkapi: Elink, osti_id: OstiID, reason: str) -> bool: ) Validation.handle_response(response) return response.status_code == 204 # True if deleted successfully -======= - elinkapi: Elink, - osti_id: OstiID, - new_state = "submit" -) -> RecordResponse: - record = elinkapi.get_single_record(osti_id) - - # assuming that the user will handle the sponsor identifier bug before calling this function - # # fix the issue with the sponsor organization's identifiers - # for entry in record.organizations: - # if entry.type == "SPONSOR": - # entry.identifiers = [{"type": 'CN_DOE', "value": 'AC02-05CH11231'}] - # break - - return elinkapi.update_record(osti_id, record, new_state) - - -def delete_osti_record( - elinkapi_token: str, - osti_id: OstiID, - reason: str -) -> RecordResponse: - review_endpoint = "https://review.osti.gov/elink2api/" - review_api = Elink(token = elinkapi_token, target=review_endpoint) - - """Delete a record by its OSTI ID.""" - response = requests.delete(f"{review_api.target}records/{osti_id}?reason={reason}", headers={"Authorization": f"Bearer {review_api.token}"}) - Validation.handle_response(response) - return response.status_code == 204 # True if deleted successfully - -def emptyReviewAPI(reason, review_api): - allDeleted = True - for record in review_api.query_records(): - delete_osti_record(review_api, record.osti_id, reason) - -def make_minimum_record_to_fully_release( - title, # required to make record - product_type = "DA", # required to make record - organizations = [Organization(type='RESEARCHING', name='LBNL Materials Project (LBNL-MP)'), - Organization(type='SPONSOR', name='TEST SPONSOR ORG', identifiers=[{"type": 'CN_DOE', "value": 'AC02-05CH11231'}])], # sponsor org is necessary for submission - persons = [Person(type='AUTHOR', last_name='Perrson')], - site_ownership_code = "LBNL-MP", - access_limitations = ['UNL'], - publication_date = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0), # what should this be? 
- site_url = "https://next-gen.materialsproject.org/materials" -) -> Record: - return Record(product_type, title, persons, site_ownership_code, access_limitations, publication_date, site_url) ->>>>>>> 5fa46e4 (Merged upstream (#1)) From da968b79047edf5128466cdecd5c74a35152359e Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 17 Jul 2025 16:42:06 -0700 Subject: [PATCH 09/65] Using uv to install ruff dependency, using uv-cache and removing pip installation --- .github/workflows/lint.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 1a76e83..4fa79be 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -19,5 +19,12 @@ jobs: python-version: "3.12" version: "latest" +<<<<<<< HEAD +======= + - name: Install ruff + run: | + uv pip install ruff + # Update output format to enable automatic inline annotations +>>>>>>> 27ac04f (Using uv to install ruff dependency, using uv-cache and removing pip installation) - name: Analyzing the code with ruff run: uvx ruff check --output-format=github From 7e5b74d716ba1dfc18c250acae6505c5c0cc81ba Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 17 Jul 2025 16:49:26 -0700 Subject: [PATCH 10/65] added new action to install virtual environment before attempting to install dependency --- .github/workflows/lint.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 4fa79be..1a76e83 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -19,12 +19,5 @@ jobs: python-version: "3.12" version: "latest" -<<<<<<< HEAD -======= - - name: Install ruff - run: | - uv pip install ruff - # Update output format to enable automatic inline annotations ->>>>>>> 27ac04f (Using uv to install ruff dependency, using uv-cache and removing pip installation) - name: Analyzing the code with ruff run: uvx ruff check --output-format=github From 2cec801318a2d7707ca9a7676238c1cb081dd6d5 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 17 Jul 2025 17:29:19 -0700 Subject: [PATCH 11/65] attempting ruff check again now that is seems to work... --- pyproject.toml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 9c723eb..12cf519 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,8 +81,14 @@ exclude = [ "node_modules", "site-packages", "venv", +<<<<<<< HEAD "legacy" >>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files) +======= + "legacy", + "notebooks", + "uv.lock" +>>>>>>> c4d9a8f (attempting ruff check again now that is seems to work...) ] # Same as Black. From 70a9f4d9952a72503cef19d418c170ccd7d39703 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Mon, 21 Jul 2025 13:26:41 -0700 Subject: [PATCH 12/65] Removed assume python 3.9 from ruff config in pyproject.toml --- pyproject.toml | 47 ----------------------------------------------- 1 file changed, 47 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 12cf519..e93ed31 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,57 +50,15 @@ Issues = "https://github.com/materialsproject/MPCite/issues" [tool.ruff] # Exclude a variety of commonly ignored directories. 
exclude = [ -<<<<<<< HEAD "legacy", "notebooks", "uv.lock" -======= - ".bzr", - ".direnv", - ".eggs", - ".git", - ".git-rewrite", - ".hg", - ".ipynb_checkpoints", - ".mypy_cache", - ".nox", - ".pants.d", - ".pyenv", - ".pytest_cache", - ".pytype", - ".ruff_cache", - ".svn", - ".tox", - ".venv", - ".vscode", - "__pypackages__", - "_build", - "buck-out", - "build", - "dist", - "node_modules", - "site-packages", - "venv", -<<<<<<< HEAD - "legacy" ->>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files) -======= - "legacy", - "notebooks", - "uv.lock" ->>>>>>> c4d9a8f (attempting ruff check again now that is seems to work...) ] # Same as Black. line-length = 88 indent-width = 4 -<<<<<<< HEAD -======= -# Assume Python 3.9 -target-version = "py39" - ->>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files) [tool.ruff.lint] # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. # Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or @@ -112,13 +70,8 @@ ignore = [] fixable = ["ALL"] unfixable = [] -<<<<<<< HEAD # Allow NO unused variables to exist in the codebase. If underscore-prefixed unused variables are permissible, use this regex $^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" dummy-variable-rgx = "^$" -======= -# Allow unused variables when underscore-prefixed. -dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" ->>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files) [tool.ruff.format] # Like Black, use double quotes for strings. From 5abbf4b573e16645c38ddd81a22c27b50250c44c Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 10:30:01 -0700 Subject: [PATCH 13/65] Attempting first run of testing suite with github actions --- src/mp_cite/core.py | 4 +- test_media_files/another_media_file.txt | 7 + test_media_files/best_media_file.txt | 3 + test_media_files/media_file.txt | 1 + tests/conf_test.py | 55 ++++++ tests/conftest.py | 11 -- tests/elink_service_test.py | 16 ++ tests/elinkapi_test.py | 234 ++++++++++++++++++++++++ 8 files changed, 318 insertions(+), 13 deletions(-) create mode 100644 test_media_files/another_media_file.txt create mode 100644 test_media_files/best_media_file.txt create mode 100644 test_media_files/media_file.txt create mode 100644 tests/conf_test.py delete mode 100644 tests/conftest.py create mode 100644 tests/elinkapi_test.py diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index 8f081b4..c7e54e6 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -3,7 +3,7 @@ from pymongo import MongoClient import requests from elinkapi.utils import Validation -from mp_cite.doi_builder import MinimumDARecord +from mp_cite.models import MinimumDARecord from typing import Literal, TypeAlias OstiID: TypeAlias = int @@ -152,4 +152,4 @@ def delete_osti_record(elinkapi: Elink, osti_id: OstiID, reason: str) -> bool: headers={"Authorization": f"Bearer {elinkapi.token}"}, ) Validation.handle_response(response) - return response.status_code == 204 # True if deleted successfully + return response.status_code == 204 # True if deleted successfully \ No newline at end of file diff --git a/test_media_files/another_media_file.txt b/test_media_files/another_media_file.txt new file mode 100644 index 0000000..9a64dd0 --- /dev/null +++ b/test_media_files/another_media_file.txt @@ -0,0 +1,7 @@ +WOWWWWWWWWWWWW + + + + + +O \ No newline at end of file diff --git a/test_media_files/best_media_file.txt b/test_media_files/best_media_file.txt new file mode 100644 
index 0000000..c708781
--- /dev/null
+++ b/test_media_files/best_media_file.txt
@@ -0,0 +1,3 @@
+Not actually the best
+
+Sue Me
\ No newline at end of file
diff --git a/test_media_files/media_file.txt b/test_media_files/media_file.txt
new file mode 100644
index 0000000..c9d49e9
--- /dev/null
+++ b/test_media_files/media_file.txt
@@ -0,0 +1 @@
+This is a media file. The text is here. that is all.
\ No newline at end of file
diff --git a/tests/conf_test.py b/tests/conf_test.py
new file mode 100644
index 0000000..4a25e36
--- /dev/null
+++ b/tests/conf_test.py
@@ -0,0 +1,55 @@
+import os
+import pytest
+from elinkapi import Elink, exceptions
+from dotenv import load_dotenv
+
+load_dotenv()
+
+
+@pytest.fixture
+def elink_review_client():
+    """
+    builds and returns an Elink client for the review environment.
+    credentials come from the environment or a local .env file.
+    """
+    elink_review_api_key = os.getenv("elink_review_api_token")
+    review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT")
+    return Elink(token=elink_review_api_key, target=review_endpoint)
+
+
+@pytest.fixture
+def elink_production_client():
+    """
+    builds and returns an Elink client for the production environment.
+    credentials come from the environment or a local .env file.
+    """
+    elink_prod_api_key = os.getenv("elink_api_PRODUCTION_key")
+    return Elink(token=elink_prod_api_key)
+
+
+def test_get_single_record(elink_production_client):
+    try:
+        record = elink_production_client.get_single_record(1190959)
+        assert record.title == "Materials Data on Si by Materials Project"
+        assert record.osti_id == 1190959
+    except exceptions.ForbiddenException as fe:
+        pytest.fail(
+            f"Forbidden: Check API key or permissions associated with provided API key. {fe}"
+        )
+    except exceptions.BadRequestException as ve:
+        pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}")
+    except Exception as e:
+        pytest.fail(f"Unexpected error: {e}")
+
+
+def test_query_records(elink_production_client):
+    try:
+        elink_production_client.query_records()
+    except exceptions.ForbiddenException as fe:
+        pytest.fail(
+            f"Forbidden: Check API key or permissions associated with provided API key. {fe}"
+        )
+    except exceptions.BadRequestException as ve:
+        pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}")
+    except Exception as e:
+        pytest.fail(f"Unexpected error: {e}")
diff --git a/tests/conftest.py b/tests/conftest.py
deleted file mode 100644
index 0e4eaf1..0000000
--- a/tests/conftest.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import os
-
-import pytest
-from elinkapi import Elink
-
-
-@pytest.fixture
-def elink_review_client():
-    review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT")
-    elink_review_api_key = os.getenv("ELINK_REVIEW_API_TOKEN")
-    return Elink(token=elink_review_api_key, target=review_endpoint)
diff --git a/tests/elink_service_test.py b/tests/elink_service_test.py
index 0857d35..3e1c2b1 100644
--- a/tests/elink_service_test.py
+++ b/tests/elink_service_test.py
@@ -1,14 +1,30 @@
+from elinkapi import Elink
 from elinkapi.record import RecordResponse
+import pytest
+import os
+from dotenv import load_dotenv
+
+load_dotenv()
 # TODO: Write tests that verify our usage of Elink is correct,
 # and make sure any upstream breaking changes get caught
 # here when version upgrades happen
+
 # 1. general query logic + params that we use regularly?
 # 2. make sure we can submit a correctly templated dataset submission
 # 3. make sure record updates work
 # 4. deleting records?
 # 5+.
test any other surfaces of the Elink api that we interact with +@pytest.fixture +def elink_review_client(): + """ + tests whether or not the elink review client can be properly retrieved. + returns the elink review client + """ + elink_review_api_key = os.getenv("elink_review_api_token") + review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") + return Elink(token=elink_review_api_key, target=review_endpoint) def test_elink_query(elink_review_client): diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py new file mode 100644 index 0000000..0eb6725 --- /dev/null +++ b/tests/elinkapi_test.py @@ -0,0 +1,234 @@ +import pytest +from elinkapi import Elink, Record, exceptions +import os +from src.mp_cite.core import make_minimum_record_to_fully_release +from dotenv import load_dotenv +from datetime import datetime + +load_dotenv() + + +valid_save_json = { + "title": "Electron microscope data for photons", + "site_ownership_code": "LLNL", + "product_type": "TR", + "description": "Hello, from teh other side", +} +valid_save_update_json = { + "title": "Electron microscope data for photons", + "site_ownership_code": "LLNL", + "product_type": "TR", + "description": "A NEW custom description. Search on 'Allo-ballo holla olah'.", +} +invalid_save_json = {"product_type": "TD", "site_ownership_code": "LLNL"} +valid_submit_json = { + "persons": [ + { + "type": "AUTHOR", + "first_name": "Required", + "middle_name": "Optional", + "last_name": "Required", + "email": ["optional@optional.org"], + "orcid": "0000000155554447", + "phone": "Optional", + "affiliations": [{"name": "Optional"}], + }, + { + "type": "RELEASE", + "first_name": "Required", + "middle_name": "Optional", + "last_name": "Required", + "email": ["required@required.org"], + "phone": "Optional", + }, + { + "type": "CONTRIBUTING", + "first_name": "Required", + "middle_name": "Optional", + "last_name": "Required", + "email": ["optional@optional.org"], + "phone": "Optional", + "contributor_type": "Producer", + "affiliations": [{"name": "Optional"}], + }, + ], + "organizations": [ + {"type": "AUTHOR", "name": "Required"}, + {"type": "CONTRIBUTING", "name": "Required", "contributor_type": "Producer"}, + { + "type": "SPONSOR", + "name": "Required", + "identifiers": [ + {"type": "CN_NONDOE", "value": "Required"}, + {"type": "CN_DOE", "value": "SC0001234"}, + {"type": "AWARD_DOI", "value": "Optional"}, + ], + }, + {"type": "RESEARCHING", "name": "Required"}, + ], + "identifiers": [ + {"type": "CN_DOE", "value": "SC0001234"}, + {"type": "CN_NONDOE", "value": "Required"}, + ], + "related_identifiers": [], + "access_limitations": ["UNL"], + "country_publication_code": "US", + "description": "Information about a particular record, report, or other document, or executive summary or abstract of same.", + "languages": ["English"], + "product_type": "TR", + "publication_date": "2018-02-21", + "publication_date_text": "Winter 2012", + "released_to_osti_date": "2023-03-03", + "site_ownership_code": "LBNL", + "title": "Sample document title", +} + + +@pytest.fixture +def elink_review_client(): + """ + tests whether or not the elink review client can be properly retrieved. + returns the elink review client + """ + elink_review_api_key = os.getenv("elink_review_api_token") + review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") + return Elink(token=elink_review_api_key, target=review_endpoint) + + +@pytest.fixture +def elink_production_client(): + """ + tests whether or not the elink review client can be properly retrieved. 
+    returns the elink review client
+    """
+    elink_prod_api_key = os.getenv("elink_api_PRODUCTION_key")
+    return Elink(token=elink_prod_api_key)
+
+
+osti_id = "2300069"
+# osti_id = 2300063
+media_id = "1900082"
+reason = "I wanted to"
+revision_number = "2"
+date = datetime.now()
+state = "save"
+file_path = "./test_media_files/media_file.txt"
+file_path2 = "./test_media_files/best_media_file.txt"
+file_path3 = "./test_media_files/another_media_file.txt"
+json_responses = []
+reserved_osti_id = 1
+
+
+# RECORD ENDPOINTS
+# Post a new Record
+@pytest.fixture
+def test_post_new_record(elink_review_client):
+    record_to_post = make_minimum_record_to_fully_release(
+        title="Test Post Record - PyTest"
+    )
+    # try:
+    #     saved_record = elink_review_client.post_new_record(record_to_post, "save") # Works - saved
+    # except exceptions.ForbiddenException as fe:
+    #     pytest.fail(f"Forbidden: Check API key or permissions associated with provided API key. {fe}")
+    # except exceptions.BadRequestException as ve:
+    #     pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}")
+    # except Exception as e:
+    #     pytest.fail(f"Unexpected error: {e}")
+
+    try:
+        submitted_record = elink_review_client.post_new_record(
+            record_to_post, "submit"
+        )  # Works - submit
+        return submitted_record
+    except exceptions.ForbiddenException as fe:
+        pytest.fail(
+            f"Forbidden: Check API key or permissions associated with provided API key. {fe}"
+        )
+    except exceptions.BadRequestException as ve:
+        pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}")
+    except Exception as e:
+        pytest.fail(f"Unexpected error: {e}")
+
+
+def test_get_new_single_record(test_post_new_record):
+    # record_to_post = make_minimum_record_to_fully_release(title="Test Getting New Single Record - PyTest")
+    # submitted_record = elink_review_client.post_new_record(record_to_post, "submit")
+
+    posted_record = test_post_new_record
+
+    elink_review_api_key = os.getenv("elink_review_api_token")
+    review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT")
+    elink_review_client = Elink(token=elink_review_api_key, target=review_endpoint)
+
+    osti_id = test_post_new_record.osti_id
+
+    single_record = elink_review_client.get_single_record(osti_id)
+
+    assert osti_id is not None
+    assert single_record.title == posted_record.title
+    # assert single_record.organizations == record_to_post.organizations # this doesn't work because Elink's pydantic model defaults an empty identifier to [], whereas an empty identifier field is returned as None.
+    # assert single_record.persons == record_to_post.persons # same issue as above^
+    assert single_record.publication_date == posted_record.publication_date
+
+
+def test_invalid_query(elink_production_client):
+    list_of_records = elink_production_client.query_records(
+        title="Allo-ballo holla olah"
+    )  # works, nothing found
+    assert list_of_records.total_rows == 0
+
+
+# Reserve a DOI
+def test_reserve_DOI(elink_review_client):
+    try:
+        elink_review_client.reserve_doi(Record(**valid_save_json))  # works - saved
+    except Exception:
+        print("failed to reserve doi on record")
+
+
+def test_update_record(test_post_new_record):
+    posted_record = test_post_new_record
+    osti_id = posted_record.osti_id
+
+    elink_review_api_key = os.getenv("elink_review_api_token")
+    review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT")
+    elink_review_client = Elink(token=elink_review_api_key, target=review_endpoint)
+
+    # Update an existing Record
+    elink_review_client.update_record(
+        osti_id,
+        make_minimum_record_to_fully_release("Test Updating Record - PyTest"),
+        "submit",
+    )  # works
+
+    # Get Revision based on revision number
+    elink_review_client.get_revision_by_number(osti_id, revision_number)  # works
+    # Get Revision based on date - currently not working...?
+    # revision_by_date = elink_review_client.get_revision_by_date(osti_id, date.strftime("%Y-%d-%m")) # works
+    # Get all RevisionHistory of a Record
+    revision_history = elink_review_client.get_all_revisions(osti_id)  # works
+    revision_history[0]
+    revision_history[-1]
+
+    # # MEDIA ENDPOINTS
+    # # Associate new Media with a Record
+    # posted_media = elink_review_client.post_media(osti_id, file_path, {"title": "Title of the Media media_file.txt"})
+    # posted_media3 = elink_review_client.post_media(osti_id, file_path3, {"title": "Title of the Media media_file.txt"})
+    # media_id = posted_media.media_id
+    # # Replace existing Media on a Record
+    # replaced_media2 = elink_review_client.put_media(osti_id, media_id, file_path2, {"title": "Changed this title now"})
+    # # Get Media associated with OSTI ID
+    # media = elink_review_client.get_media(osti_id)
+    # # Get Media content of a media resource
+    # media_content = elink_review_client.get_media_content(media_id)
+    # # Delete Media with media_id off of a Record
+    # isSuccessDelete = elink_review_client.delete_single_media(osti_id, media_id, reason) #works
+    # assert isSuccessDelete
+    # # Delete all Media associated with a Record
+    # isSuccessAllDelete = elink_review_client.delete_all_media(osti_id, reason)
+    # assert isSuccessAllDelete
+
+    # # Should see that all media has been deleted
+    # final_media = elink_review_client.get_media(osti_id)
+
+    # print("Finished")

From 87798dc836897d01af5e5bbeab202f603df81178 Mon Sep 17 00:00:00 2001
From: HugoOnghai
Date: Thu, 24 Jul 2025 10:35:45 -0700
Subject: [PATCH 14/65] Forgot to allow for action to run on push to testing-suite

---
 .github/workflows/testing.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
index 9f71a9e..5af2906 100644
--- a/.github/workflows/testing.yml
+++ b/.github/workflows/testing.yml
@@ -2,9 +2,9 @@ name: testing
 
 on:
   push:
-    branches: [master]
+    branches: [master, testing-suite]
   pull_request:
-    branches: [master]
+    branches: [master, testing-suite]
 
 jobs:
   test:

From e1eabc7920d42eb0874dae3d1e1c91753beffdb1 Mon Sep 17 00:00:00 2001
From: HugoOnghai
Date: Thu, 24 Jul 2025 10:52:48 -0700
Subject: [PATCH 15/65] Trying to fix environment variables...
--- .github/workflows/testing.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 5af2906..c2a809d 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -33,7 +33,8 @@ jobs: - name: Run tests env: - ELINK_REVIEW_API_TOKEN: ${{ secrets.ELINK_REVIEW_API_TOKEN }} + elink_api_PRODUCTION_key: ${{ secrets.ELINK_PRODUCTION_API_TOKEN }} + elink_review_api_token: ${{ secrets.ELINK_REVIEW_API_TOKEN }} ELINK_REVIEW_ENDPOINT: ${{ secrets.ELINK_REVIEW_ENDPOINT }} run: uv run pytest tests # codecov? From 17a25e712a078d426f3941c1c9131b5c23362893 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 11:02:55 -0700 Subject: [PATCH 16/65] Trying to resolve install dotenv issue --- .github/workflows/testing.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index c2a809d..ad6caa6 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -29,7 +29,10 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install the project - run: uv sync --locked --all-extras --dev + run: | + uv sync --locked --all-extras --dev + uv pip install python-dotenv + - name: Run tests env: From 09c615360ae6270102222d085581fe3eaf8f1d68 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 11:34:26 -0700 Subject: [PATCH 17/65] Verifying which packages install during workflow --- .github/workflows/testing.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index ad6caa6..16b1cd0 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -29,10 +29,10 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install the project - run: | - uv sync --locked --all-extras --dev - uv pip install python-dotenv + run: uv sync --locked --all-extras --dev + - name: Verify installed packages + run: uv pip list - name: Run tests env: From 606d8a884bf97100c1afa80b44d4d4ff6384a349 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 11:41:46 -0700 Subject: [PATCH 18/65] Trying to update uv.lock --- .github/workflows/testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 16b1cd0..03adf8e 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -39,5 +39,5 @@ jobs: elink_api_PRODUCTION_key: ${{ secrets.ELINK_PRODUCTION_API_TOKEN }} elink_review_api_token: ${{ secrets.ELINK_REVIEW_API_TOKEN }} ELINK_REVIEW_ENDPOINT: ${{ secrets.ELINK_REVIEW_ENDPOINT }} - run: uv run pytest tests + run: uvx pytest tests # codecov? 
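Note: the workflow patches above only wire the ELink credentials into CI as environment variables; local runs are expected to read the same variable names from an untracked .env file via python-dotenv. A minimal sketch of the client setup the test fixtures build on (variable names taken from the workflow's env block; the fixture shape mirrors tests/conf_test.py):

```python
import os

from dotenv import load_dotenv
from elinkapi import Elink

# In CI these variables are injected by the workflow's env block;
# locally, load_dotenv() fills them in from a .env file.
load_dotenv()

review_client = Elink(
    token=os.getenv("elink_review_api_token"),
    target=os.getenv("ELINK_REVIEW_ENDPOINT"),
)
production_client = Elink(token=os.getenv("elink_api_PRODUCTION_key"))
```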
From 1cf9951e73d9f1e99bdf98de3bb4d1c5af104bd0 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 11:56:25 -0700 Subject: [PATCH 19/65] Fixed/added python-dotenv in the uv.lock file --- pyproject.toml | 1 + uv.lock | 11 +++++++++++ 2 files changed, 12 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index e93ed31..d589bfb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,7 @@ dependencies = [ "elinkapi>=0.4.9", "pydantic>=2.11.7", "pymongo>=4.13.2", + "python-dotenv>=1.1.1", ] [dependency-groups] diff --git a/uv.lock b/uv.lock index e6682a7..287c431 100644 --- a/uv.lock +++ b/uv.lock @@ -303,6 +303,7 @@ dependencies = [ { name = "elinkapi" }, { name = "pydantic" }, { name = "pymongo" }, + { name = "python-dotenv" }, ] [package.dev-dependencies] @@ -323,6 +324,7 @@ requires-dist = [ { name = "elinkapi", specifier = ">=0.4.9" }, { name = "pydantic", specifier = ">=2.11.7" }, { name = "pymongo", specifier = ">=4.13.2" }, + { name = "python-dotenv", specifier = ">=1.1.1" }, ] [package.metadata.requires-dev] @@ -608,6 +610,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + [[package]] name = "pyyaml" version = "6.0.2" From 8dfc4244c4c056f7600a5375466d8ec8081bc2fa Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 12:48:14 -0700 Subject: [PATCH 20/65] Returning to using uv run pytest tests --- .github/workflows/testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 03adf8e..16b1cd0 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -39,5 +39,5 @@ jobs: elink_api_PRODUCTION_key: ${{ secrets.ELINK_PRODUCTION_API_TOKEN }} elink_review_api_token: ${{ secrets.ELINK_REVIEW_API_TOKEN }} ELINK_REVIEW_ENDPOINT: ${{ secrets.ELINK_REVIEW_ENDPOINT }} - run: uvx pytest tests + run: uv run pytest tests # codecov? 
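Note: `uv run` executes pytest inside the project's locked environment, so python-dotenv only becomes importable once it is declared in pyproject.toml and resolved into uv.lock, which is what PATCH 19 does. A quick sanity check under that assumption (check_env.py is a hypothetical file name):

```python
# Run with `uv run python check_env.py`; an ImportError here would mean
# python-dotenv never made it into the locked environment.
from importlib.metadata import version

from dotenv import load_dotenv  # provided by the python-dotenv distribution

assert callable(load_dotenv)
print("python-dotenv", version("python-dotenv"))
```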
From 31122838696c32cc33f6c5c5ec999f01852743b3 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 12:50:24 -0700 Subject: [PATCH 21/65] Removed extraneous test file --- tests/test_elink_api.py | 109 ---------------------------------------- 1 file changed, 109 deletions(-) delete mode 100644 tests/test_elink_api.py diff --git a/tests/test_elink_api.py b/tests/test_elink_api.py deleted file mode 100644 index 80afba7..0000000 --- a/tests/test_elink_api.py +++ /dev/null @@ -1,109 +0,0 @@ -import os -from dotenv import load_dotenv - -from elinkapi import Elink, Record, exceptions -import pytest -from mpcite.models import ELinkGetResponseModel, TestClass - -from pymongo import MongoClient -import pymongo - -load_dotenv() - -atlas_user = os.environ.get("atlas_user") -atlas_password = os.environ.get("atlas_password") -atlas_host = os.environ.get("atlas_host") -mongo_uri = f"mongodb+srv://{atlas_user}:{atlas_password}@{atlas_host}/" - -api = Elink(token=os.environ.get("elink_api_PRODUCTION_key")) # target default is production E-link service. - -### Grabbing an existing record - -# record = api.get_single_record(mp-id) # test for silicon - -# type(record) - -# ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record) - -# print(ELinkGotRecordModel.get_title()) -# print(ELinkGotRecordModel.get_site_url()) -# print(ELinkGotRecordModel.get_keywords()) -# print(ELinkGotRecordModel.get_default_description()) - -# ELinkTestGetRecordModel = TestClass(**record.model_dump()) - -### Making a new record - -# with MongoClient(mongo_uri) as client: -# #get all material_ids and dois from doi collection -# doi_collection = client["mp_core"]["dois"] -# materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10)) -# material_ids = [entry["material_id"] for entry in materials_to_update] - -# # check # of material_ids from DOI collection vs amount in robocrys - -# # get description for material_ids from robocrys collection -# coll = client["mp_core_blue"]["robocrys"] -# res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) - -# # join on material_id -# for doc in res: -# mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update)) -# doc["doi"] = mat["doi"] - - -# {"material_id": ..., "doi": ..., "description": ...} -> -# Record( -# template_fields ..., -# doi: ..., -# description: ..., -# fields_where_material_id_makes_sense: ..., -# ) - -# with the client open -with MongoClient(mongo_uri) as client: - # get all dois from the collection - doi_collection = client["mp_core"]["dois"] - materials_to_update = list(doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2)) - - # from the doi collection, grab the material_id and doi of each material - material_ids = [entry["material_id"] for entry in materials_to_update] - - # additionally, gain the osti id from the doi - osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update] - - # additionally, grab the description of each material from the robocrys - coll = client["mp_core_blue"]["robocrys"] # grabs robocrys collection from active database - res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) # grabs the material id and description of entries in the collection - descriptions = [entry["description"] for entry in res] - - # for each material (and its material_id, doi, and osti_id) - for i in range(len(materials_to_update)): 
- internal_material_id = material_ids[i] - internal_osti_id = osti_ids[i] - internal_description = descriptions[i] - - # get_single_record(osti_id) - record = api.get_single_record(internal_osti_id) - - print(f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************") - print(record) - - if internal_material_id == record.site_unique_id: - # update description - record.description = "testTESTtestTESTtest" - - print(f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - print(record) - - # # post updated record - # try: - # saved_record = api.post_new_record(record, "save") - # except exceptions.BadRequestException as ve: - # ... - # # ve.message = "Site Code AAAA is not valid." - # # ve.errors provides more details: - # # [{"status":"400", "detail":"Site Code AAAA is not valid.", "source":{"pointer":"site_ownership_code"}}] - - - From a1f12e04e9bf8093a99b23af103a200d2793e860 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Wed, 6 Aug 2025 13:36:38 -0700 Subject: [PATCH 22/65] Trying to fix the broken elinkapi_tests but they depend on the new DAMinimum model in new-core-to-rebase... --- __init__.py | 0 src/__init__.py | 0 tests/elinkapi_test.py | 9 ++++++++- 3 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 __init__.py create mode 100644 src/__init__.py diff --git a/__init__.py b/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py index 0eb6725..2f4eae9 100644 --- a/tests/elinkapi_test.py +++ b/tests/elinkapi_test.py @@ -1,10 +1,17 @@ import pytest from elinkapi import Elink, Record, exceptions + +import sys import os -from src.mp_cite.core import make_minimum_record_to_fully_release + from dotenv import load_dotenv from datetime import datetime +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) +from src.mp_cite.core import ( + make_minimum_record_to_fully_release, +) # cannot find a good workaround for this... + load_dotenv() From b35491c1aa5d7a873d91787d5c092efd0e5d3a9a Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 7 Aug 2025 11:37:07 -0700 Subject: [PATCH 23/65] Finished rebase after new core PR merged, trying to clean up and get tests running again --- .github/workflows/release.yml | 8 --- .github/workflows/testing.yml | 15 +---- src/mp_cite/core.py | 5 +- src/mp_cite/models.py | 112 ---------------------------------- 4 files changed, 6 insertions(+), 134 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6f3c42d..e5d2a28 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,7 +1,6 @@ name: release on: -<<<<<<< HEAD release: types: [published] @@ -71,10 +70,3 @@ jobs: - name: Build and Deploy! 
run: uvx mkdocs gh-deploy -======= - push: - branches: [master] - pull_request: - branches: [master] -# TODO: setup release to pypi ->>>>>>> 52382ff (Merged upstream (#1)) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 16b1cd0..c5bc018 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -2,9 +2,9 @@ name: testing on: push: - branches: [master, testing-suite] + branches: [master] pull_request: - branches: [master, testing-suite] + branches: [master] jobs: test: @@ -17,11 +17,6 @@ jobs: runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 -<<<<<<< HEAD -======= - with: - fetch-depth: 0 ->>>>>>> 52382ff (Merged upstream (#1)) - name: Install uv uses: astral-sh/setup-uv@v6 @@ -31,13 +26,9 @@ jobs: - name: Install the project run: uv sync --locked --all-extras --dev - - name: Verify installed packages - run: uv pip list - - name: Run tests env: elink_api_PRODUCTION_key: ${{ secrets.ELINK_PRODUCTION_API_TOKEN }} elink_review_api_token: ${{ secrets.ELINK_REVIEW_API_TOKEN }} ELINK_REVIEW_ENDPOINT: ${{ secrets.ELINK_REVIEW_ENDPOINT }} - run: uv run pytest tests -# codecov? + run: uv run pytest tests \ No newline at end of file diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index c7e54e6..8cb16d5 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -3,11 +3,12 @@ from pymongo import MongoClient import requests from elinkapi.utils import Validation -from mp_cite.models import MinimumDARecord +from models import MinimumDARecord from typing import Literal, TypeAlias OstiID: TypeAlias = int + def find_out_of_date_doi_entries( rc_client: MongoClient, doi_client: MongoClient, @@ -152,4 +153,4 @@ def delete_osti_record(elinkapi: Elink, osti_id: OstiID, reason: str) -> bool: headers={"Authorization": f"Bearer {elinkapi.token}"}, ) Validation.handle_response(response) - return response.status_code == 204 # True if deleted successfully \ No newline at end of file + return response.status_code == 204 # True if deleted successfully diff --git a/src/mp_cite/models.py b/src/mp_cite/models.py index 71b4c9a..8bbaf74 100644 --- a/src/mp_cite/models.py +++ b/src/mp_cite/models.py @@ -1,4 +1,3 @@ -<<<<<<< HEAD from pydantic import BaseModel, Field, model_validator from datetime import datetime @@ -83,114 +82,3 @@ class MinimumDARecord(Record): default_factory=lambda: datetime.now(tz=pytz.UTC) ) site_url: str = Field(default="https://next-gen.materialsproject.org/materials") -======= -from pydantic import BaseModel, Field, ConfigDict -from typing import List, Dict, Optional -import datetime -from enum import Enum -import bibtexparser -from elinkapi import Elink, Record -from elinkapi.record import RecordResponse, AccessLimitation, JournalType -from elinkapi.geolocation import Geolocation -from elinkapi.identifier import Identifier -from elinkapi.related_identifier import RelatedIdentifier -from elinkapi.person import Person -from elinkapi.organization import Organization - -class TestClass(RecordResponse): - ... - # stuff - -class ELinkGetResponseModel(BaseModel): - osti_id: Optional[int] = Field(...) - dataset_type: str = Field(default="SM") - title: str = Field(...) - persons: List[Person] - contributors: List[Dict[str, str]] = Field( - default=[{"first_name": "Materials", "last_name": "Project"}], - description="List of Dict of first name, last name mapping", - ) # no contributor - publication_date: datetime.date - site_url: str = Field(...) 
- doi: dict = Field( - {}, title="DOI info", description="Mainly used during GET request" - ) - mp_id: str | None = None - keywords: List[str] = None - - @classmethod - def from_elinkapi_record(cls, R): - gotResponse = ELinkGetResponseModel( - osti_id = R.osti_id, - title = R.title, - persons = R.persons, - # assume default contributors for now, creators vs contributors? - publication_date = R.publication_date, - site_url = R.site_url, - doi = {"doi": R.doi}, - mp_id = next((id.value for id in R.identifiers if id.type == 'RN'), None), - keywords = R.keywords - ) - - return gotResponse - - def get_title(self): - formula = self.keywords[1] - return "Materials Data on %s by Materials Project" % formula - - def get_site_url(self): - return "https://materialsproject.org/materials/%s" % self.mp_id - - def get_keywords(self): - # keywords = "; ".join( - # ["crystal structure", material.pretty_formula, material.chemsys] - # ) - return self.keywords - - @classmethod - def get_default_description(cls): - return ( - "Computed materials data using density " - "functional theory calculations. These calculations determine " - "the electronic structure of bulk materials by solving " - "approximations to the Schrodinger equation. For more " - "information, see https://materialsproject.org/docs/calculations" - ) - - @classmethod - def custom_to_dict(cls, elink_record) -> dict: - if elink_record.osti_id is None or elink_record.osti_id == "": - return elink_record.dict(exclude={"osti_id", "doi"}) - else: - return elink_record.dict(exclude={"doi"}) - - -class ElinkResponseStatusEnum(Enum): - SUCCESS = "SUCCESS" - FAILED = "FAILURE" - - -class ELinkPostResponseModel(BaseModel): - osti_id: str - accession_num: str - product_nos: str - title: str - contract_nos: str - other_identifying_nos: Optional[str] - doi: Dict[str, str] - status: ElinkResponseStatusEnum - status_message: Optional[str] - - def generate_doi_record(self): - doi_collection_record = DOIRecordModel( - material_id=self.accession_num, - doi=self.doi["#text"], - status=self.doi["@status"], - bibtex=None, - valid=True, - last_validated_on=datetime.now(), - ) - doi_collection_record.set_status(status=self.doi["@status"]) - doi_collection_record.last_validated_on = datetime.now() - return doi_collection_record ->>>>>>> 5fa46e4 (Merged upstream (#1)) From 0346ff48110bdad8b9119960493c425f3be8b19d Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 7 Aug 2025 11:44:37 -0700 Subject: [PATCH 24/65] Removed extra files in tests/ and updated email on pyproject.toml --- legacy/mpcite/recordresponse_example.txt | 0 pyproject.toml | 1 - src/mp_cite/core.py | 8 +- src/mp_cite/send_collection.py | 79 --------------- tests/manage_backfills.py | 49 --------- tests/outputs.txt | 46 --------- tests/prod_to_review.py | 120 ----------------------- uv.lock | 11 --- 8 files changed, 7 insertions(+), 307 deletions(-) delete mode 100644 legacy/mpcite/recordresponse_example.txt delete mode 100644 src/mp_cite/send_collection.py delete mode 100644 tests/manage_backfills.py delete mode 100644 tests/outputs.txt delete mode 100644 tests/prod_to_review.py diff --git a/legacy/mpcite/recordresponse_example.txt b/legacy/mpcite/recordresponse_example.txt deleted file mode 100644 index e69de29..0000000 diff --git a/pyproject.toml b/pyproject.toml index d589bfb..e93ed31 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,7 +17,6 @@ dependencies = [ "elinkapi>=0.4.9", "pydantic>=2.11.7", "pymongo>=4.13.2", - "python-dotenv>=1.1.1", ] [dependency-groups] diff --git 
a/src/mp_cite/core.py b/src/mp_cite/core.py index 8cb16d5..03bacf5 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -1,10 +1,16 @@ +from typing import TypeAlias + from elinkapi import Elink from elinkapi.record import RecordResponse from pymongo import MongoClient + import requests from elinkapi.utils import Validation + + from models import MinimumDARecord -from typing import Literal, TypeAlias + +from typing import Literal OstiID: TypeAlias = int diff --git a/src/mp_cite/send_collection.py b/src/mp_cite/send_collection.py deleted file mode 100644 index 0ce65a3..0000000 --- a/src/mp_cite/send_collection.py +++ /dev/null @@ -1,79 +0,0 @@ -from pathlib import Path -from xml.dom.minidom import parseString -from dicttoxml import dicttoxml -from mpcite.doi_builder import DOIBuilder -import json -from monty.json import MontyDecoder -from pydantic import BaseModel, Field -from typing import List - -default_description = ( - "Computed materials data using density functional theory calculations. These " - "calculations determine the electronic structure of bulk materials by solving " - "approximations to the Schrodinger equation. For more information, " - "see https://materialsproject.org/docs/calculations" -) - - -class CollectionsModel(BaseModel): - title: str = Field(default="Sample Title") - product_type: str = Field(default="DC") - relidentifiersblock: List[List[str]] = Field() - contributors: List[dict] - description: str = Field(default=default_description) - site_url: str = Field(default="https://materialsproject.org/") - - -config_file = Path("/Users/michaelwu/Desktop/projects/MPCite/files/config_prod.json") - -bld: DOIBuilder = json.load(config_file.open("r"), cls=MontyDecoder) -bld.config_file_path = config_file.as_posix() - -records = [ - CollectionsModel( - relidentifiersblock=[["mp-1", "mp-2", "mp-1"]], - contributors=[ - { - "first_name": "Michael", - "last_name": "Wu", - "email": "wuxiaohua1011@berkeley.edu", - } - ], - ).dict(), - CollectionsModel( - relidentifiersblock=[["mp-21"], ["mp-22"]], - contributors=[ - { - "first_name": "Michael", - "last_name": "Wu", - "email": "wuxiaohua1011@berkeley.edu", - } - ], - ).dict(), -] - - -def my_item_func(x): - if x == "records": - return "record" - elif x == "contributors": - return "contributor" - elif x == "relidentifier_detail": - return "related_identifier" - elif x == "relidentifiersblock": - return "relidentifier_detail" - else: - return "item" - - -records_xml = parseString( - dicttoxml(records, custom_root="records", attr_type=False, item_func=my_item_func) -) - -for item in records_xml.getElementsByTagName("relidentifier_detail"): - item.setAttribute("type", "accession_num") - item.setAttribute("relationType", "Compiles") - -print(records_xml.toprettyxml()) -# response = bld.elink_adapter.post_collection(data=records_xml.toxml()) -# print(response) diff --git a/tests/manage_backfills.py b/tests/manage_backfills.py deleted file mode 100644 index a835456..0000000 --- a/tests/manage_backfills.py +++ /dev/null @@ -1,49 +0,0 @@ -# This script will see how many documents in ELink, i.e. ones with a DOI, are not accounted for in the internal DOI collection. - -from elinkapi import Elink, Query, Record - -import os -from dotenv import load_dotenv - -load_dotenv() # depends on the root directory from which you run your python scripts. 
- -api = Elink(token = os.environ.get("elink_api_PRODUCTION_key")) - - -query1 = api.query_records(rows=1000) - -materials_with_dois : list[Record] = [] - -for page in query1: - print(f"Now on Page: {page.title}") - print(f"Material_ID: {page.site_unique_id} and DOI: http://doi.org/{page.doi}") - - if page.site_unique_id.startswith("mp-"): - materials_with_dois.append(page) - - # for record in page.data: - # if record.site_unique_id.startswith("mp-"): - # materials_with_dois.append(record) - - - -# set_q1 = [page for page in query1] -# set_q2 = [page for page in query2] - -# set_diffq1q2 = set(set_q1) - set(set_q2) -# print (f"Difference matched {len(set)} records") - -# filtered = [ -# page for page in query1 -# if page.title.lower().startswith("materials data on") -# ] - -# print (f"Filtered Query1 has {len(filtered)} records") - -# paginate through ALL results -# for page in query1: -# print(page.title) -# print(f"Material_ID: {page.site_unique_id} and DOI: http://doi.org/{page.doi}") - -# for record in page.data: -# print (f"OSTI ID: {record.osti_id} Title: {record.title}") \ No newline at end of file diff --git a/tests/outputs.txt b/tests/outputs.txt deleted file mode 100644 index 8d188e7..0000000 --- a/tests/outputs.txt +++ /dev/null @@ -1,46 +0,0 @@ -(mpcite-env) C:\Users\ongha\OneDrive\Documents\GitHub\MPCite>C:/Users/ongha/anaconda3/envs/mpcite-env/python.exe c:/Users/ongha/OneDrive/Documents/GitHub/MPCite/tests/prod_to_review.py - -Query retrieved 144845 record(s) -Page finished. Now at 500 data entries. 0 edge cases found. -Page finished. Now at 1000 data entries. 0 edge cases found. -Page finished. Now at 1500 data entries. 0 edge cases found. -Page finished. Now at 2000 data entries. 0 edge cases found. -Page finished. Now at 2500 data entries. 0 edge cases found. -Page finished. Now at 3000 data entries. 0 edge cases found. -Page finished. Now at 3500 data entries. 0 edge cases found. -Page finished. Now at 4000 data entries. 0 edge cases found. -Page finished. Now at 4500 data entries. 0 edge cases found. -Page finished. Now at 5000 data entries. 0 edge cases found. -Page finished. Now at 5500 data entries. 0 edge cases found. -Page finished. Now at 6000 data entries. 0 edge cases found. -Page finished. Now at 6500 data entries. 0 edge cases found. -Page finished. Now at 7000 data entries. 0 edge cases found. -Page finished. Now at 7500 data entries. 0 edge cases found. -Page finished. Now at 8000 data entries. 0 edge cases found. -Page finished. Now at 8500 data entries. 0 edge cases found. -Page finished. Now at 9000 data entries. 0 edge cases found. -Page finished. Now at 9500 data entries. 0 edge cases found. -Page finished. Now at 10000 data entries. 0 edge cases found. -Page finished. Now at 10500 data entries. 0 edge cases found. -Page finished. Now at 11000 data entries. 0 edge cases found. -Page finished. Now at 11500 data entries. 0 edge cases found. -Page finished. Now at 12000 data entries. 0 edge cases found. -Page finished. Now at 12500 data entries. 0 edge cases found. -Page finished. Now at 13000 data entries. 0 edge cases found. -Page finished. Now at 13500 data entries. 0 edge cases found. -Page finished. Now at 14000 data entries. 0 edge cases found. -Page finished. Now at 14500 data entries. 0 edge cases found. 
- -Traceback (most recent call last): - File "C:\Users\ongha\anaconda3\envs\mpcite-env\Lib\site-packages\elinkapi\query.py", line 95, in __next__ - record = self.data.pop() -IndexError: pop from empty list - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File "c:\Users\ongha\OneDrive\Documents\GitHub\MPCite\tests\prod_to_review.py", line 29, in - record = next(query) - File "C:\Users\ongha\anaconda3\envs\mpcite-env\Lib\site-packages\elinkapi\query.py", line 108, in __next__ - raise StopIteration -StopIteration \ No newline at end of file diff --git a/tests/prod_to_review.py b/tests/prod_to_review.py deleted file mode 100644 index 87e311d..0000000 --- a/tests/prod_to_review.py +++ /dev/null @@ -1,120 +0,0 @@ -from elinkapi import Elink, Query, Record - -import os -from dotenv import load_dotenv - -import json - -load_dotenv() # depends on the root directory from which you run your python scripts. - -review_endpoint = "https://review.osti.gov/elink2api/" - -prod_api = Elink(token = os.environ.get("elink_api_PRODUCTION_key")) -review_api = Elink(token = os.environ.get("elink_review_api_token"), target=review_endpoint) - -print(prod_api.query_records()) - -rows_per_page = 100 - -# query production -query = prod_api.query_records(rows=rows_per_page) -print(f"Query retrieved {query.total_rows} record(s)") - -count_materials_data = 0 -count_MaterialsDataOn = 0 -cwd = os.getcwd() -page_number = 0 -page_json_list = [] - -for record in query: - # increment counter - count_materials_data = count_materials_data + 1 - print(f"On record #{count_materials_data}, next url is {query.next_url}, previous url is {query.previous_url}") - - # see if the record is a Materials Data on record - if record.title.startswith("Materials Data on"): - # increment the MaterialsDataOn counter - count_MaterialsDataOn = count_MaterialsDataOn + 1 - - # prepare the new record for the review environment, remove the OSTI ID, and add its model_dump to the list of json objects for the page. - new_record = record - new_record_dict = new_record.model_dump(exclude_none=True) - - new_record_osti_id = new_record_dict.pop("osti_id") # now new_record_dict does not have the osti_id key. - js = json.dumps(new_record_dict, default=str) # datetime objects are not JSON serializable, so we use default=str to convert them to strings. - - page_json_list.append(js) - - # TODO: take the new_record_dict and make it into a new post to the review environment and save the RecordResponse. - - else: - print(f"Found edge case: {record.title}") - - if count_materials_data % rows_per_page == 0: - # create/open, write, and close new json file - page_number = count_materials_data / rows_per_page - path = f'/json_pages/page_number_{page_number}' - fp = open(cwd+path, 'a') - - for js in page_json_list: - fp.write(js) - fp.write("\n") - - fp.close() - page_json_list = [] - - print(f"Page {page_number} finished. Now at {count_materials_data} data entries. 
{count_materials_data - count_MaterialsDataOn} edge cases found.") - -# print remainder of records if not a full page after for loop exits -page_number = page_number + 1 -path = f'/json_pages/page_number_{page_number}' -fp = open(cwd+path, 'a') -for js in page_json_list: - fp.write(js) - fp.write("\n") -fp.close() - -# # if contains materials data on, then add to batch -# for count_materials_data < query.total_rows: - -# # print(f"The length of the query is now {len(query.data)}") -# record = next(query) -# count_materials_data = count_materials_data + 1 - -# if record.title.startswith("Materials Data on"): -# count_MaterialsDataOn = count_MaterialsDataOn + 1 - -# new_record = record -# new_record_dict = new_record.model_dump(exclude_none=True) - -# new_record_osti_id = new_record_dict.pop("osti_id") - -# page_dict[f"Entry OSTI_ID {new_record_osti_id}"] = new_record_dict - -# # TODO: take the new_record_dict and make it into a new post to the review environment and save the RecordResponse. - - - -# if count_materials_data % rows_per_page == 0: -# # if a page has been fully consummed, then print the new batched dictionary to a json file. - -# js = json.dumps(page_dict, default=str) - -# # open new json file if not exist it will create -# cwd = os.getcwd() -# path = f'/json_pages/page_number_{count_materials_data/rows_per_page}' -# fp = open(cwd+path, 'a') - -# # write to json file -# fp.write(js) - -# # close the connection to the file and empty the dict -# fp.close() -# page_dict = {} - -# print(f"Page {(count_materials_data / rows_per_page)} finished. Now at {count_materials_data} data entries. {count_materials_data - count_MaterialsDataOn} edge cases found.") - -# model_dump exclude_none=True, remove null keys -# pop osti_id --> save batch to json files -# make new record -# post to review_api diff --git a/uv.lock b/uv.lock index 287c431..e6682a7 100644 --- a/uv.lock +++ b/uv.lock @@ -303,7 +303,6 @@ dependencies = [ { name = "elinkapi" }, { name = "pydantic" }, { name = "pymongo" }, - { name = "python-dotenv" }, ] [package.dev-dependencies] @@ -324,7 +323,6 @@ requires-dist = [ { name = "elinkapi", specifier = ">=0.4.9" }, { name = "pydantic", specifier = ">=2.11.7" }, { name = "pymongo", specifier = ">=4.13.2" }, - { name = "python-dotenv", specifier = ">=1.1.1" }, ] [package.metadata.requires-dev] @@ -610,15 +608,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] -[[package]] -name = "python-dotenv" -version = "1.1.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, -] - [[package]] name = "pyyaml" version = "6.0.2" From 7c247fda19723b6f08934713e5249175be743139 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 7 Aug 2025 
11:47:39 -0700 Subject: [PATCH 25/65] revert .gitignore and re-add lost legacy models file --- .gitignore | 15 +- legacy/mpcite/models.py | 319 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 322 insertions(+), 12 deletions(-) create mode 100644 legacy/mpcite/models.py diff --git a/.gitignore b/.gitignore index 6b641e9..8241d4c 100644 --- a/.gitignore +++ b/.gitignore @@ -183,9 +183,9 @@ cython_debug/ .abstra/ # Visual Studio Code -# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore +# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore -# and can be added to the global gitignore or merged into this file. However, if you prefer, +# and can be added to the global gitignore or merged into this file. However, if you prefer, # you could uncomment the following to ignore the entire vscode folder .vscode/ @@ -210,15 +210,6 @@ __marimo__/ # Streamlit .streamlit/secrets.toml -<<<<<<< HEAD json_pages/ notebooks/ -test_json_pages/ -======= -# json files for storing production records -*.json -.env -/json_pages -/notebooks -/test_json_pages ->>>>>>> b991f09 (New Branch for Linting Workflow) +test_json_pages/ \ No newline at end of file diff --git a/legacy/mpcite/models.py b/legacy/mpcite/models.py new file mode 100644 index 0000000..b2fab65 --- /dev/null +++ b/legacy/mpcite/models.py @@ -0,0 +1,319 @@ +from pydantic import BaseModel, Field +from typing import List, Dict, Optional +from datetime import datetime +from enum import Enum +import bibtexparser + + +class ConnectionModel(BaseModel): + endpoint: str = Field(..., title="URL Endpoint of the connection") + username: str = Field(..., title="User Name") + password: str = Field(..., title="Password") + + +class RoboCrysModel(BaseModel): + material_id: str + last_updated: datetime + description: Optional[str] = None + error: Optional[str] = None + + @classmethod + def get_default_description(cls): + return ( + "Computed materials data using density " + "functional theory calculations. These calculations determine " + "the electronic structure of bulk materials by solving " + "approximations to the Schrodinger equation. For more " + "information, see https://materialsproject.org/docs/calculations" + ) + + +class MaterialModel(BaseModel): + last_updated: datetime = Field( + None, title="timestamp for the most recent calculation" + ) + updated_at: datetime = Field(None, title="alternative to last_updated") + created_at: datetime = Field( + None, + description="creation time for this material defined by when the first structure " + "optimization calculation was run", + ) + task_id: str = Field( + "", title="task id for this material. Also called the material id" + ) + # pretty_formula: str = Field(..., title="clean representation of the formula") + pretty_formula: str = Field(..., title="clean representation of the formula") + chemsys: str + + +class ELinkGetResponseModel(BaseModel): + osti_id: Optional[str] = Field(...) + dataset_type: str = Field(default="SM") + title: str = Field(...) 
+ creators: str = Field(default="Kristin Persson") # replace with authors + contributors: List[Dict[str, str]] = Field( + default=[{"first_name": "Materials", "last_name": "Project"}], + description="List of Dict of first name, last name mapping", + ) # no contributor + product_nos: str = Field(..., title="MP id") + accession_num: str = Field(..., title="MP id") + contract_nos: str = Field("AC02-05CH11231; EDCBEE") + originating_research_org: str = Field( + default="Lawrence Berkeley National Laboratory (LBNL), Berkeley, CA (United States)" + ) + publication_date: str = Field(...) + language: str = Field(default="English") + country: str = Field(default="US") + sponsor_org: str = Field( + default="USDOE Office of Science (SC), Basic Energy Sciences (BES) (SC-22)" + ) + site_url: str = Field(...) + contact_name: str = Field(default="Kristin Persson") + contact_org: str = Field(default="LBNL") + contact_email: str = Field(default="feedback@materialsproject.org") + contact_phone: str = Field(default="+1(510)486-7218") + related_resource: str = Field("https://materialsproject.org/citing") + contributor_organizations: str = Field(default="MIT; UC Berkeley; Duke; U Louvain") + subject_categories_code: str = Field(default="36 MATERIALS SCIENCE") + keywords: str = Field(...) + description: str = Field(default="") + doi: dict = Field( + {}, title="DOI info", description="Mainly used during GET request" + ) + + @classmethod + def get_title(cls, material: MaterialModel): + formula = material.pretty_formula + return "Materials Data on %s by Materials Project" % formula + + @classmethod + def get_site_url(cls, mp_id): + return "https://materialsproject.org/materials/%s" % mp_id + + @classmethod + def get_keywords(cls, material): + keywords = "; ".join( + ["crystal structure", material.pretty_formula, material.chemsys] + ) + return keywords + + @classmethod + def get_default_description(cls): + return ( + "Computed materials data using density " + "functional theory calculations. These calculations determine " + "the electronic structure of bulk materials by solving " + "approximations to the Schrodinger equation. For more " + "information, see https://materialsproject.org/docs/calculations" + ) + + @classmethod + def custom_to_dict(cls, elink_record) -> dict: + if elink_record.osti_id is None or elink_record.osti_id == "": + return elink_record.dict(exclude={"osti_id", "doi"}) + else: + return elink_record.dict(exclude={"doi"}) + + +class ElinkResponseStatusEnum(Enum): + SUCCESS = "SUCCESS" + FAILED = "FAILURE" + + +class ELinkPostResponseModel(BaseModel): + osti_id: str + accession_num: str + product_nos: str + title: str + contract_nos: str + other_identifying_nos: Optional[str] + doi: Dict[str, str] + status: ElinkResponseStatusEnum + status_message: Optional[str] + + def generate_doi_record(self): + doi_collection_record = DOIRecordModel( + material_id=self.accession_num, + doi=self.doi["#text"], + status=self.doi["@status"], + bibtex=None, + valid=True, + last_validated_on=datetime.now(), + ) + doi_collection_record.set_status(status=self.doi["@status"]) + doi_collection_record.last_validated_on = datetime.now() + return doi_collection_record + + +class DOIRecordStatusEnum(str, Enum): + COMPLETED = "COMPLETED" + PENDING = "PENDING" + FAILURE = "FAILURE" + INIT = "INIT" + + +class DOIRecordModel(BaseModel): + material_id: str = Field(...) 
+ doi: str = Field(default="") + bibtex: Optional[str] = None + status: DOIRecordStatusEnum + valid: bool = Field(False) + last_updated: datetime = Field( + default=datetime.now(), + title="DOI last updated time.", + description="Last updated is defined as either a Bibtex or status change.", + ) + created_at: datetime = Field( + default=datetime.now(), + title="DOI Created At", + description="creation time for this DOI record", + ) + last_validated_on: datetime = Field( + default=datetime.now(), + title="Date Last Validated", + description="Date that this data is last validated, " "not necessarily updated", + ) + elsevier_updated_on: datetime = Field( + default=datetime.now(), + title="Date Elsevier is updated", + description="If None, means never uploaded to elsevier", + ) + error: Optional[str] = Field( + default=None, description="None if no error, else error message" + ) + + class Config: + use_enum_values = True + + def set_status(self, status): + self.status = status + + def get_osti_id(self): + if self.doi is None or self.doi == "": + return "" + else: + return self.doi.split("/")[-1] + + def get_bibtex_abstract(self): + try: + if self.bibtex is None: + return "" + bib_db: bibtexparser.bibdatabase.BibDatabase = bibtexparser.loads( + self.bibtex + ) + if bib_db.entries: + return bib_db.entries[0]["abstractnote"] + except Exception as e: + print(e) + return "" + + +class OSTIDOIRecordModel(DOIRecordModel): + material_id: str = Field(...) + doi: str = Field(default="") + bibtex: Optional[str] = None + valid: bool = Field(False) + last_updated: datetime = Field( + default=datetime.now(), + title="DOI last updated time.", + description="Last updated is defined as either a Bibtex or status change.", + ) + + +class ElsevierPOSTContainerModel(BaseModel): + identifier: str = Field(default="", title="mp_id") + source: str = "MATERIALS_PROJECT" + date: str = datetime.now().date().isoformat().__str__() + title: str + description: str = "" + doi: str + authors: List[str] = ["Kristin Persson"] + url: str + type: str = "dataset" + dateAvailable: str = datetime.now().date().isoformat().__str__() + dateCreated: str = datetime.now().date().isoformat().__str__() + version: str = "1.0.0" + funding: str = "USDOE Office of Science (SC), Basic Energy Sciences (BES) (SC-22)" + language: str = "en" + method: str = "Materials Project" + accessRights: str = "Public" + contact: str = "Kristin Persson " + dataStandard: str = "https://materialsproject.org/citing" + howToCite: str = "https://materialsproject.org/citing" + subjectAreas: List[str] = ["36 MATERIALS SCIENCE"] + keywords: List[str] + institutions: List[str] = ["Lawrence Berkeley National Laboratory"] + institutionIds: List[str] = ["AC02-05CH11231; EDCBEE"] + spatialCoverage: List[str] = [] + temporalCoverage: List[str] = [] + references: List[str] = ["https://materialsproject.org/citing"] + relatedResources: List[str] = ["https://materialsproject.org/citing"] + location: str = "1 Cyclotron Rd, Berkeley, CA 94720" + childContainerIds: List[str] = [] + + @classmethod + def get_url(cls, mp_id): + return "https://materialsproject.org/materials/%s" % mp_id + + @classmethod + def get_keywords(cls, material: MaterialModel): + return ["crystal structure", material.pretty_formula, material.chemsys] + + @classmethod + def get_default_description(cls): + return ( + "Computed materials data using density " + "functional theory calculations. 
These calculations determine "
+            "the electronic structure of bulk materials by solving "
+            "approximations to the Schrodinger equation. For more "
+            "information, see https://materialsproject.org/docs/calculations"
+        )
+
+    @classmethod
+    def get_date_created(cls, material: MaterialModel) -> str:
+        return material.created_at.date().__str__()
+
+    @classmethod
+    def get_date_available(cls, material: MaterialModel) -> str:
+        return material.created_at.date().__str__()
+
+    @classmethod
+    def get_title(cls, material: MaterialModel) -> str:
+        return material.pretty_formula
+
+    @classmethod
+    def from_material_model(cls, material: MaterialModel, doi: str, description: str):
+        model = ElsevierPOSTContainerModel(
+            identifier=material.task_id,
+            title=material.pretty_formula,
+            doi=doi,
+            url="https://materialsproject.org/materials/%s" % material.task_id,
+            keywords=["crystal structure", material.pretty_formula, material.chemsys],
+            date=datetime.now().date().__str__(),
+            dateCreated=material.created_at.date().__str__(),
+            dateAvailable=ElsevierPOSTContainerModel.get_date_available(material),
+            description=description,
+        )
+        return model
+
+
+class ExplorerGetJSONResponseModel(BaseModel):
+    osti_id: str
+    title: str
+    report_number: str
+    doi: str
+    product_type: str
+    language: str
+    country_publication: str
+    description: str
+    site_ownership_code: str
+    publication_date: str
+    entry_date: str
+    contributing_organizations: str
+    authors: List[str]
+    subjects: List[str]
+    contributing_org: str
+    doe_contract_number: str
+    sponsor_orgs: List[str]
+    research_orgs: List[str]
+    links: List[Dict[str, str]]

From 3f9f1ef2cb0bd8836121d75a78444bd14df0e378 Mon Sep 17 00:00:00 2001
From: HugoOnghai
Date: Thu, 7 Aug 2025 14:07:41 -0700
Subject: [PATCH 26/65] Trying to get the testing-suite running again; there
 have been changes to ElinkAPI since the last testing debug...
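This commit also moves update_existing_osti_record onto E-Link's PATCH
endpoint instead of the old get-modify-update round trip. A minimal sketch of
the intended call path, assuming the patch_record(osti_id, new_values, state)
signature used in the diff below; the OSTI ID shown is only illustrative:

    import os
    from elinkapi import Elink

    api = Elink(
        token=os.environ.get("elink_review_api_token"),
        target=os.environ.get("ELINK_REVIEW_ENDPOINT"),
    )

    # Only the fields named in new_values are replaced on the record;
    # every other field on the E-Link record is left untouched.
    new_values = {"description": "Updated description text"}
    response = api.patch_record(1190959, new_values, "save")  # state: "save" or "submit"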
--- .gitignore | 10 +++++--- pyproject.toml | 1 + src/mp_cite/core.py | 21 ++++++++++------- src/mp_cite/models.py | 7 +++--- tests/elinkapi_test.py | 39 ++++++++++++------------------- uv.lock | 53 +++++++++++++++++++++++++----------------- 6 files changed, 71 insertions(+), 60 deletions(-) diff --git a/.gitignore b/.gitignore index 8241d4c..c84ff87 100644 --- a/.gitignore +++ b/.gitignore @@ -210,6 +210,10 @@ __marimo__/ # Streamlit .streamlit/secrets.toml -json_pages/ -notebooks/ -test_json_pages/ \ No newline at end of file +# json files for storing production records +*.json +.env + +/json_pages +/notebooks +/test_json_pages diff --git a/pyproject.toml b/pyproject.toml index e93ed31..d589bfb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,7 @@ dependencies = [ "elinkapi>=0.4.9", "pydantic>=2.11.7", "pymongo>=4.13.2", + "python-dotenv>=1.1.1", ] [dependency-groups] diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index 03bacf5..08b0846 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -8,7 +8,7 @@ from elinkapi.utils import Validation -from models import MinimumDARecord +from .models import MinimumDARecord from typing import Literal @@ -69,7 +69,10 @@ def find_out_of_date_doi_entries( def update_existing_osti_record( - elinkapi: Elink, osti_id: OstiID, new_values: dict + elinkapi: Elink, + osti_id: OstiID, + new_values: dict, + new_state: Literal["save", "submit"], ) -> RecordResponse: """ update_existing_osti_record allows users to provide a dictionary of keywords and new values, which will replace the old values under the same keywords in the record with the given osti id @@ -92,14 +95,16 @@ def update_existing_osti_record( Instead, we leave this for the user. """ - record_on_elink = elinkapi.get_single_record(osti_id) + # record_on_elink = elinkapi.get_single_record(osti_id) + + # for keyword in new_values: + # setattr(record_on_elink, keyword, new_values[keyword]) - for keyword in new_values: - setattr(record_on_elink, keyword, new_values[keyword]) + # return elinkapi.update_record( + # osti_id, record_on_elink, state="save" + # ) # user should use update_state_of_osti_record to submit instead - return elinkapi.update_record( - osti_id, record_on_elink, state="save" - ) # user should use update_state_of_osti_record to submit instead + elinkapi.patch_record(osti_id, new_values, new_state) def submit_new_osti_record( diff --git a/src/mp_cite/models.py b/src/mp_cite/models.py index 8bbaf74..e707b76 100644 --- a/src/mp_cite/models.py +++ b/src/mp_cite/models.py @@ -1,10 +1,9 @@ from pydantic import BaseModel, Field, model_validator -from datetime import datetime +from datetime import datetime, timezone, date from elinkapi import Record, Organization, Person from typing import List, Any -import pytz class DOIModel(BaseModel): @@ -78,7 +77,7 @@ class MinimumDARecord(Record): ) site_ownership_code: str = Field(default="LBNL-MP") access_limitations: List[str] = Field(default_factory=lambda: ["UNL"]) - publication_date: datetime = Field( - default_factory=lambda: datetime.now(tz=pytz.UTC) + publication_date: date = Field( + default_factory=lambda: datetime.now(timezone.utc).date() ) site_url: str = Field(default="https://next-gen.materialsproject.org/materials") diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py index 2f4eae9..22e4c3b 100644 --- a/tests/elinkapi_test.py +++ b/tests/elinkapi_test.py @@ -1,5 +1,6 @@ import pytest from elinkapi import Elink, Record, exceptions +from elinkapi.record import RecordResponse import sys import os @@ -8,9 
+9,9 @@ from datetime import datetime sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) -from src.mp_cite.core import ( - make_minimum_record_to_fully_release, -) # cannot find a good workaround for this... +from src.mp_cite.models import ( + MinimumDARecord, +) # cannot find a good workaround for this with relative importing... load_dotenv() @@ -129,18 +130,8 @@ def elink_production_client(): # RECORD ENDPOINTS # Post a new Record @pytest.fixture -def test_post_new_record(elink_review_client): - record_to_post = make_minimum_record_to_fully_release( - title="Test Post Record - PyTest" - ) - # try: - # saved_record = elink_review_client.post_new_record(record_to_post, "save") # Works - saved - # except exceptions.ForbiddenException as fe: - # pytest.fail(f"Forbidden: Check API key or permissions associated with provided API key. {fe}") - # except exceptions.BadRequestException as ve: - # pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}") - # except Exception as e: - # pytest.fail(f"Unexpected error: {e}") +def test_post_new_record(elink_review_client) -> RecordResponse: + record_to_post = MinimumDARecord(title="Test Post Record - PyTest") try: submitted_record = elink_review_client.post_new_record( @@ -158,9 +149,6 @@ def test_post_new_record(elink_review_client): def test_get_new_single_record(test_post_new_record): - # record_to_post = make_minimum_record_to_fully_release(title="Test Getting New Single Record - PyTest") - # submitted_record = elink_review_client.post_new_record(record_to_post, "submit") - posted_record = test_post_new_record elink_review_api_key = os.getenv("elink_review_api_token") @@ -204,18 +192,21 @@ def test_update_record(test_post_new_record): # Update an existing Record elink_review_client.update_record( osti_id, - make_minimum_record_to_fully_release("Test Updating Record - PyTest"), + MinimumDARecord(title="Test Updating Record - PyTest"), "submit", ) # works # Get Revision based on revision number elink_review_client.get_revision_by_number(osti_id, revision_number) # works - # Get Revision based on date Currently Not Working...? - # revision_by_date = elink_review_client.get_revision_by_date(osti_id, date.strftime("%Y-%d-%m")) # works + + # as of 8/7/2025, elinkapi 0.5.1, these get_all_revisions() calls have stopped working)... 
+ # elink_prod_client = Elink(token=os.getenv("elink_api_PRODUCTION_key")) + # print(elink_prod_client.get_all_revisions(1758063)) + # Get all RevisionHistory of a Record - revision_history = elink_review_client.get_all_revisions(osti_id) # works - revision_history[0] - revision_history[-1] + # revision_history = elink_review_client.get_all_revisions(osti_id) # works + # revision_history[0] + # revision_history[-1] # # MEDIA ENDPOINTS # # Associate new Media with a Record diff --git a/uv.lock b/uv.lock index e6682a7..970822d 100644 --- a/uv.lock +++ b/uv.lock @@ -303,6 +303,7 @@ dependencies = [ { name = "elinkapi" }, { name = "pydantic" }, { name = "pymongo" }, + { name = "python-dotenv" }, ] [package.dev-dependencies] @@ -323,6 +324,7 @@ requires-dist = [ { name = "elinkapi", specifier = ">=0.4.9" }, { name = "pydantic", specifier = ">=2.11.7" }, { name = "pymongo", specifier = ">=4.13.2" }, + { name = "python-dotenv", specifier = ">=1.1.1" }, ] [package.metadata.requires-dev] @@ -608,6 +610,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + [[package]] name = "pyyaml" version = "6.0.2" @@ -684,27 +695,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/81/0bd3594fa0f690466e41bd033bdcdf86cba8288345ac77ad4afbe5ec743a/ruff-0.12.7.tar.gz", hash = "sha256:1fc3193f238bc2d7968772c82831a4ff69252f673be371fb49663f0068b7ec71", size = 5197814, upload-time = "2025-07-29T22:32:35.877Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/d2/6cb35e9c85e7a91e8d22ab32ae07ac39cc34a71f1009a6f9e4a2a019e602/ruff-0.12.7-py3-none-linux_armv6l.whl", hash = "sha256:76e4f31529899b8c434c3c1dede98c4483b89590e15fb49f2d46183801565303", size = 11852189, upload-time = "2025-07-29T22:31:41.281Z" }, - { url = "https://files.pythonhosted.org/packages/63/5b/a4136b9921aa84638f1a6be7fb086f8cad0fde538ba76bda3682f2599a2f/ruff-0.12.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:789b7a03e72507c54fb3ba6209e4bb36517b90f1a3569ea17084e3fd295500fb", size = 12519389, upload-time = "2025-07-29T22:31:54.265Z" }, - { url = "https://files.pythonhosted.org/packages/a8/c9/3e24a8472484269b6b1821794141f879c54645a111ded4b6f58f9ab0705f/ruff-0.12.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e1c2a3b8626339bb6369116e7030a4cf194ea48f49b64bb505732a7fce4f4e3", size = 11743384, upload-time = "2025-07-29T22:31:59.575Z" }, - { url = 
"https://files.pythonhosted.org/packages/26/7c/458dd25deeb3452c43eaee853c0b17a1e84169f8021a26d500ead77964fd/ruff-0.12.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32dec41817623d388e645612ec70d5757a6d9c035f3744a52c7b195a57e03860", size = 11943759, upload-time = "2025-07-29T22:32:01.95Z" }, - { url = "https://files.pythonhosted.org/packages/7f/8b/658798472ef260ca050e400ab96ef7e85c366c39cf3dfbef4d0a46a528b6/ruff-0.12.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47ef751f722053a5df5fa48d412dbb54d41ab9b17875c6840a58ec63ff0c247c", size = 11654028, upload-time = "2025-07-29T22:32:04.367Z" }, - { url = "https://files.pythonhosted.org/packages/a8/86/9c2336f13b2a3326d06d39178fd3448dcc7025f82514d1b15816fe42bfe8/ruff-0.12.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a828a5fc25a3efd3e1ff7b241fd392686c9386f20e5ac90aa9234a5faa12c423", size = 13225209, upload-time = "2025-07-29T22:32:06.952Z" }, - { url = "https://files.pythonhosted.org/packages/76/69/df73f65f53d6c463b19b6b312fd2391dc36425d926ec237a7ed028a90fc1/ruff-0.12.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5726f59b171111fa6a69d82aef48f00b56598b03a22f0f4170664ff4d8298efb", size = 14182353, upload-time = "2025-07-29T22:32:10.053Z" }, - { url = "https://files.pythonhosted.org/packages/58/1e/de6cda406d99fea84b66811c189b5ea139814b98125b052424b55d28a41c/ruff-0.12.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74e6f5c04c4dd4aba223f4fe6e7104f79e0eebf7d307e4f9b18c18362124bccd", size = 13631555, upload-time = "2025-07-29T22:32:12.644Z" }, - { url = "https://files.pythonhosted.org/packages/6f/ae/625d46d5164a6cc9261945a5e89df24457dc8262539ace3ac36c40f0b51e/ruff-0.12.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0bfe4e77fba61bf2ccadf8cf005d6133e3ce08793bbe870dd1c734f2699a3e", size = 12667556, upload-time = "2025-07-29T22:32:15.312Z" }, - { url = "https://files.pythonhosted.org/packages/55/bf/9cb1ea5e3066779e42ade8d0cd3d3b0582a5720a814ae1586f85014656b6/ruff-0.12.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06bfb01e1623bf7f59ea749a841da56f8f653d641bfd046edee32ede7ff6c606", size = 12939784, upload-time = "2025-07-29T22:32:17.69Z" }, - { url = "https://files.pythonhosted.org/packages/55/7f/7ead2663be5627c04be83754c4f3096603bf5e99ed856c7cd29618c691bd/ruff-0.12.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41df94a957d50083fd09b916d6e89e497246698c3f3d5c681c8b3e7b9bb4ac8", size = 11771356, upload-time = "2025-07-29T22:32:20.134Z" }, - { url = "https://files.pythonhosted.org/packages/17/40/a95352ea16edf78cd3a938085dccc55df692a4d8ba1b3af7accbe2c806b0/ruff-0.12.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4000623300563c709458d0ce170c3d0d788c23a058912f28bbadc6f905d67afa", size = 11612124, upload-time = "2025-07-29T22:32:22.645Z" }, - { url = "https://files.pythonhosted.org/packages/4d/74/633b04871c669e23b8917877e812376827c06df866e1677f15abfadc95cb/ruff-0.12.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:69ffe0e5f9b2cf2b8e289a3f8945b402a1b19eff24ec389f45f23c42a3dd6fb5", size = 12479945, upload-time = "2025-07-29T22:32:24.765Z" }, - { url = "https://files.pythonhosted.org/packages/be/34/c3ef2d7799c9778b835a76189c6f53c179d3bdebc8c65288c29032e03613/ruff-0.12.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a07a5c8ffa2611a52732bdc67bf88e243abd84fe2d7f6daef3826b59abbfeda4", size = 12998677, upload-time = "2025-07-29T22:32:27.022Z" }, - { url = 
"https://files.pythonhosted.org/packages/77/ab/aca2e756ad7b09b3d662a41773f3edcbd262872a4fc81f920dc1ffa44541/ruff-0.12.7-py3-none-win32.whl", hash = "sha256:c928f1b2ec59fb77dfdf70e0419408898b63998789cc98197e15f560b9e77f77", size = 11756687, upload-time = "2025-07-29T22:32:29.381Z" }, - { url = "https://files.pythonhosted.org/packages/b4/71/26d45a5042bc71db22ddd8252ca9d01e9ca454f230e2996bb04f16d72799/ruff-0.12.7-py3-none-win_amd64.whl", hash = "sha256:9c18f3d707ee9edf89da76131956aba1270c6348bfee8f6c647de841eac7194f", size = 12912365, upload-time = "2025-07-29T22:32:31.517Z" }, - { url = "https://files.pythonhosted.org/packages/4c/9b/0b8aa09817b63e78d94b4977f18b1fcaead3165a5ee49251c5d5c245bb2d/ruff-0.12.7-py3-none-win_arm64.whl", hash = "sha256:dfce05101dbd11833a0776716d5d1578641b7fddb537fe7fa956ab85d1769b69", size = 11982083, upload-time = "2025-07-29T22:32:33.881Z" }, +version = "0.12.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4b/da/5bd7565be729e86e1442dad2c9a364ceeff82227c2dece7c29697a9795eb/ruff-0.12.8.tar.gz", hash = "sha256:4cb3a45525176e1009b2b64126acf5f9444ea59066262791febf55e40493a033", size = 5242373, upload-time = "2025-08-07T19:05:47.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/1e/c843bfa8ad1114fab3eb2b78235dda76acd66384c663a4e0415ecc13aa1e/ruff-0.12.8-py3-none-linux_armv6l.whl", hash = "sha256:63cb5a5e933fc913e5823a0dfdc3c99add73f52d139d6cd5cc8639d0e0465513", size = 11675315, upload-time = "2025-08-07T19:05:06.15Z" }, + { url = "https://files.pythonhosted.org/packages/24/ee/af6e5c2a8ca3a81676d5480a1025494fd104b8896266502bb4de2a0e8388/ruff-0.12.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9a9bbe28f9f551accf84a24c366c1aa8774d6748438b47174f8e8565ab9dedbc", size = 12456653, upload-time = "2025-08-07T19:05:09.759Z" }, + { url = "https://files.pythonhosted.org/packages/99/9d/e91f84dfe3866fa648c10512904991ecc326fd0b66578b324ee6ecb8f725/ruff-0.12.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2fae54e752a3150f7ee0e09bce2e133caf10ce9d971510a9b925392dc98d2fec", size = 11659690, upload-time = "2025-08-07T19:05:12.551Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ac/a363d25ec53040408ebdd4efcee929d48547665858ede0505d1d8041b2e5/ruff-0.12.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0acbcf01206df963d9331b5838fb31f3b44fa979ee7fa368b9b9057d89f4a53", size = 11896923, upload-time = "2025-08-07T19:05:14.821Z" }, + { url = "https://files.pythonhosted.org/packages/58/9f/ea356cd87c395f6ade9bb81365bd909ff60860975ca1bc39f0e59de3da37/ruff-0.12.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae3e7504666ad4c62f9ac8eedb52a93f9ebdeb34742b8b71cd3cccd24912719f", size = 11477612, upload-time = "2025-08-07T19:05:16.712Z" }, + { url = "https://files.pythonhosted.org/packages/1a/46/92e8fa3c9dcfd49175225c09053916cb97bb7204f9f899c2f2baca69e450/ruff-0.12.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb82efb5d35d07497813a1c5647867390a7d83304562607f3579602fa3d7d46f", size = 13182745, upload-time = "2025-08-07T19:05:18.709Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c4/f2176a310f26e6160deaf661ef60db6c3bb62b7a35e57ae28f27a09a7d63/ruff-0.12.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dbea798fc0065ad0b84a2947b0aff4233f0cb30f226f00a2c5850ca4393de609", size = 14206885, upload-time = "2025-08-07T19:05:21.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/9d/98e162f3eeeb6689acbedbae5050b4b3220754554526c50c292b611d3a63/ruff-0.12.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49ebcaccc2bdad86fd51b7864e3d808aad404aab8df33d469b6e65584656263a", size = 13639381, upload-time = "2025-08-07T19:05:23.423Z" }, + { url = "https://files.pythonhosted.org/packages/81/4e/1b7478b072fcde5161b48f64774d6edd59d6d198e4ba8918d9f4702b8043/ruff-0.12.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ac9c570634b98c71c88cb17badd90f13fc076a472ba6ef1d113d8ed3df109fb", size = 12613271, upload-time = "2025-08-07T19:05:25.507Z" }, + { url = "https://files.pythonhosted.org/packages/e8/67/0c3c9179a3ad19791ef1b8f7138aa27d4578c78700551c60d9260b2c660d/ruff-0.12.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:560e0cd641e45591a3e42cb50ef61ce07162b9c233786663fdce2d8557d99818", size = 12847783, upload-time = "2025-08-07T19:05:28.14Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2a/0b6ac3dd045acf8aa229b12c9c17bb35508191b71a14904baf99573a21bd/ruff-0.12.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:71c83121512e7743fba5a8848c261dcc454cafb3ef2934a43f1b7a4eb5a447ea", size = 11702672, upload-time = "2025-08-07T19:05:30.413Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ee/f9fdc9f341b0430110de8b39a6ee5fa68c5706dc7c0aa940817947d6937e/ruff-0.12.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:de4429ef2ba091ecddedd300f4c3f24bca875d3d8b23340728c3cb0da81072c3", size = 11440626, upload-time = "2025-08-07T19:05:32.492Z" }, + { url = "https://files.pythonhosted.org/packages/89/fb/b3aa2d482d05f44e4d197d1de5e3863feb13067b22c571b9561085c999dc/ruff-0.12.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a2cab5f60d5b65b50fba39a8950c8746df1627d54ba1197f970763917184b161", size = 12462162, upload-time = "2025-08-07T19:05:34.449Z" }, + { url = "https://files.pythonhosted.org/packages/18/9f/5c5d93e1d00d854d5013c96e1a92c33b703a0332707a7cdbd0a4880a84fb/ruff-0.12.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:45c32487e14f60b88aad6be9fd5da5093dbefb0e3e1224131cb1d441d7cb7d46", size = 12913212, upload-time = "2025-08-07T19:05:36.541Z" }, + { url = "https://files.pythonhosted.org/packages/71/13/ab9120add1c0e4604c71bfc2e4ef7d63bebece0cfe617013da289539cef8/ruff-0.12.8-py3-none-win32.whl", hash = "sha256:daf3475060a617fd5bc80638aeaf2f5937f10af3ec44464e280a9d2218e720d3", size = 11694382, upload-time = "2025-08-07T19:05:38.468Z" }, + { url = "https://files.pythonhosted.org/packages/f6/dc/a2873b7c5001c62f46266685863bee2888caf469d1edac84bf3242074be2/ruff-0.12.8-py3-none-win_amd64.whl", hash = "sha256:7209531f1a1fcfbe8e46bcd7ab30e2f43604d8ba1c49029bb420b103d0b5f76e", size = 12740482, upload-time = "2025-08-07T19:05:40.391Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/799a1efb8b5abab56e8a9f2a0b72d12bd64bb55815e9476c7d0a2887d2f7/ruff-0.12.8-py3-none-win_arm64.whl", hash = "sha256:c90e1a334683ce41b0e7a04f41790c429bf5073b62c1ae701c9dc5b3d14f0749", size = 11884718, upload-time = "2025-08-07T19:05:42.866Z" }, ] [[package]] From a0bcc8afb7b15fcd652d019d31004931b952eb85 Mon Sep 17 00:00:00 2001 From: Tyler Mathis <35553152+tsmathis@users.noreply.github.com> Date: Mon, 11 Aug 2025 10:29:43 -0700 Subject: [PATCH 27/65] incorrect import path for MinimumDARecord model import --- src/mp_cite/core.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index 0e4d698..6d6790b 100644 --- 
a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -1,16 +1,12 @@ -from typing import TypeAlias +from typing import Literal, TypeAlias +import requests from elinkapi import Elink from elinkapi.record import RecordResponse -from pymongo import MongoClient - -import requests from elinkapi.utils import Validation +from pymongo import MongoClient - -from mp_cite.doi_builder import MinimumDARecord - -from typing import Literal +from mp_cite.models import MinimumDARecord OstiID: TypeAlias = int From 2f8cbfe57ab03f718c1437bdaefa522e453f4b10 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Mon, 11 Aug 2025 11:12:02 -0700 Subject: [PATCH 28/65] Finished test_core.py, found two issues with updating via patch and updating only state to submit --- pyproject.toml | 2 +- test_media_files/another_media_file.txt | 7 -- test_media_files/best_media_file.txt | 3 - test_media_files/media_file.txt | 1 - tests/conf_test.py | 30 +---- tests/elink_service_test.py | 48 +++++-- tests/elinkapi_test.py | 159 ++++++++++++------------ tests/test_core.py | 123 ++++++++++++++++++ tests/test_elink_api.py | 149 ++++++++++++++++++++++ uv.lock | 8 +- 10 files changed, 392 insertions(+), 138 deletions(-) delete mode 100644 test_media_files/another_media_file.txt delete mode 100644 test_media_files/best_media_file.txt delete mode 100644 test_media_files/media_file.txt create mode 100644 tests/test_core.py create mode 100644 tests/test_elink_api.py diff --git a/pyproject.toml b/pyproject.toml index d589bfb..36d9b87 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ maintainers = [ ] requires-python = ">=3.11" dependencies = [ - "elinkapi>=0.4.9", + "elinkapi>=0.5.2", "pydantic>=2.11.7", "pymongo>=4.13.2", "python-dotenv>=1.1.1", diff --git a/test_media_files/another_media_file.txt b/test_media_files/another_media_file.txt deleted file mode 100644 index 9a64dd0..0000000 --- a/test_media_files/another_media_file.txt +++ /dev/null @@ -1,7 +0,0 @@ -WOWWWWWWWWWWWW - - - - - -O \ No newline at end of file diff --git a/test_media_files/best_media_file.txt b/test_media_files/best_media_file.txt deleted file mode 100644 index c708781..0000000 --- a/test_media_files/best_media_file.txt +++ /dev/null @@ -1,3 +0,0 @@ -Not actually the best - -Sue Me \ No newline at end of file diff --git a/test_media_files/media_file.txt b/test_media_files/media_file.txt deleted file mode 100644 index c9d49e9..0000000 --- a/test_media_files/media_file.txt +++ /dev/null @@ -1 +0,0 @@ -This is a media file. The text is here. that is all. \ No newline at end of file diff --git a/tests/conf_test.py b/tests/conf_test.py index 4a25e36..2b198d4 100644 --- a/tests/conf_test.py +++ b/tests/conf_test.py @@ -1,6 +1,6 @@ import os import pytest -from elinkapi import Elink, exceptions +from elinkapi import Elink from dotenv import load_dotenv load_dotenv() @@ -25,31 +25,3 @@ def elink_production_client(): """ elink_prod_api_key = os.getenv("elink_api_PRODUCTION_key") return Elink(token=elink_prod_api_key) - - -def test_get_single_record(elink_production_client): - try: - record = elink_production_client.get_single_record(1190959) - assert record.title == "Materials Data on Si by Materials Project" - assert record.osti_id == 1190959 - except exceptions.ForbiddenException as fe: - pytest.fail( - f"Forbidden: Check API key or permissions associated with provided API key. {fe}" - ) - except exceptions.BadRequestException as ve: - pytest.fail(f"Bad Request: Possibly incorrect parameters. 
{ve}") - except Exception as e: - pytest.fail(f"Unexpected error: {e}") - - -def test_query_records(elink_production_client): - try: - elink_production_client.query_records() - except exceptions.ForbiddenException as fe: - pytest.fail( - f"Forbidden: Check API key or permissions associated with provided API key. {fe}" - ) - except exceptions.BadRequestException as ve: - pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}") - except Exception as e: - pytest.fail(f"Unexpected error: {e}") diff --git a/tests/elink_service_test.py b/tests/elink_service_test.py index 3e1c2b1..0a6515d 100644 --- a/tests/elink_service_test.py +++ b/tests/elink_service_test.py @@ -1,7 +1,6 @@ -from elinkapi import Elink +from elinkapi import exceptions from elinkapi.record import RecordResponse import pytest -import os from dotenv import load_dotenv load_dotenv() @@ -16,17 +15,44 @@ # 3. make sure record updates work # 4. deleting records? # 5+. test any other surfaces of the Elink api that we interact with -@pytest.fixture -def elink_review_client(): + + +def test_get_single_record(elink_production_client): + """ + tried to use the production client to retrieve a record. + """ + try: + record = elink_production_client.get_single_record(1190959) + assert record.title == "Materials Data on Si by Materials Project" + assert record.osti_id == 1190959 + except exceptions.ForbiddenException as fe: + pytest.fail( + f"Forbidden: Check API key or permissions associated with provided API key. {fe}" + ) + except exceptions.BadRequestException as ve: + pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}") + except Exception as e: + pytest.fail(f"Unexpected error: {e}") + + +def test_query_records(elink_production_client): """ - tests whether or not the elink review client can be properly retrieved. - returns the elink review client + tests the query functionality of the elinkapi on the production environment """ - elink_review_api_key = os.getenv("elink_review_api_token") - review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") - return Elink(token=elink_review_api_key, target=review_endpoint) + try: + elink_production_client.query_records() + except exceptions.ForbiddenException as fe: + pytest.fail( + f"Forbidden: Check API key or permissions associated with provided API key. {fe}" + ) + except exceptions.BadRequestException as ve: + pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}") + except Exception as e: + pytest.fail(f"Unexpected error: {e}") -def test_elink_query(elink_review_client): - # placeholder, just to verify gh actions until full test suite is done +def test_query_exists(elink_review_client): + """ + tests to see that the query does in fact resolve entries in the form of RecordResponse objects. + """ assert isinstance(next(elink_review_client.query_records()), RecordResponse) diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py index 22e4c3b..9253f97 100644 --- a/tests/elinkapi_test.py +++ b/tests/elinkapi_test.py @@ -1,5 +1,5 @@ import pytest -from elinkapi import Elink, Record, exceptions +from elinkapi import Record, exceptions from elinkapi.record import RecordResponse import sys @@ -8,17 +8,18 @@ from dotenv import load_dotenv from datetime import datetime + sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) from src.mp_cite.models import ( MinimumDARecord, ) # cannot find a good workaround for this with relative importing... 
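+# a possible alternative, assuming the project keeps its src layout (sketch, not
+# wired in here): an editable install via `pip install -e .` makes mp_cite
+# importable from the tests without patching sys.path at all.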
+import src.mp_cite.core as core load_dotenv() - valid_save_json = { - "title": "Electron microscope data for photons", - "site_ownership_code": "LLNL", + "title": "Test Reserving DOI - PyTest", + "site_ownership_code": "LBNL-MP", "product_type": "TR", "description": "Hello, from teh other side", } @@ -87,32 +88,10 @@ "publication_date": "2018-02-21", "publication_date_text": "Winter 2012", "released_to_osti_date": "2023-03-03", - "site_ownership_code": "LBNL", + "site_ownership_code": "LBNL-MP", "title": "Sample document title", } - -@pytest.fixture -def elink_review_client(): - """ - tests whether or not the elink review client can be properly retrieved. - returns the elink review client - """ - elink_review_api_key = os.getenv("elink_review_api_token") - review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") - return Elink(token=elink_review_api_key, target=review_endpoint) - - -@pytest.fixture -def elink_production_client(): - """ - tests whether or not the elink review client can be properly retrieved. - returns the elink review client - """ - elink_prod_api_key = os.getenv("elink_api_PRODUCTION_key") - return Elink(token=elink_prod_api_key) - - osti_id = "2300069" # osti_id = 2300063 media_id = "1900082" @@ -120,9 +99,6 @@ def elink_production_client(): revision_number = "2" date = datetime.now() state = "save" -file_path = "./test_media_files/media_file.txt" -file_path2 = "./test_media_files/best_media_file.txt" -file_path3 = "./test_media_files/another_media_file.txt" json_responses = [] reserved_osti_id = 1 @@ -148,85 +124,104 @@ def test_post_new_record(elink_review_client) -> RecordResponse: pytest.fail(f"Unexpected error: {e}") -def test_get_new_single_record(test_post_new_record): +def test_get_new_single_record(test_post_new_record, elink_review_client): posted_record = test_post_new_record - elink_review_api_key = os.getenv("elink_review_api_token") - review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") - elink_review_client = Elink(token=elink_review_api_key, target=review_endpoint) - osti_id = test_post_new_record.osti_id single_record = elink_review_client.get_single_record(osti_id) - assert osti_id is not None - assert single_record.title == posted_record.title + try: + assert osti_id is not None + except Exception: + core.delete_osti_record(elink_review_client, osti_id, "Failed Test") + pytest.fail("Assertion failed!") + + try: + assert single_record.title == posted_record.title + except Exception: + core.delete_osti_record(elink_review_client, osti_id, "Failed Test") + pytest.fail("Assertion failed!") # assert single_record.organizations == record_to_post.organizations # this doesn't work because Elink's pydantic model defaults empty identifier to [], where as an empty identifier field is returned as None. 
    # assert single_record.persons == record_to_post.persons # same issue as above^
-    assert single_record.publication_date == posted_record.publication_date
+
+    try:
+        assert single_record.publication_date == posted_record.publication_date
+    except Exception:
+        core.delete_osti_record(elink_review_client, osti_id, "Failed Test")
+        pytest.fail("Assertion failed!")
+
+    core.delete_osti_record(elink_review_client, osti_id, "Completed Test")


 def test_invalid_query(elink_production_client):
     list_of_records = elink_production_client.query_records(
         title="Allo-ballo holla olah"
-    )  # works, nothing found
+    )  # succeeds even when nothing matches
     assert list_of_records.total_rows == 0


 # Reserve a DOI
 def test_reserve_DOI(elink_review_client):
     try:
-        elink_review_client.reserve_doi(Record(**valid_save_json))  # works - naved
-    except Exception:
-        print("failed to reserve doi on record")
+        rr = elink_review_client.reserve_doi(Record(**valid_save_json))
+    except exceptions.ForbiddenException as fe:
+        # reserve_doi raised, so no record exists yet and there is nothing to clean up
+        pytest.fail(
+            f"Forbidden: Check API key or permissions associated with provided API key. {fe}"
+        )
+    except exceptions.BadRequestException as ve:
+        pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}")
+    except Exception as e:
+        pytest.fail(f"Unexpected error: {e}")
+
+    core.delete_osti_record(elink_review_client, rr.osti_id, "Completed Test")


-def test_update_record(test_post_new_record):
+def test_update_record(test_post_new_record, elink_review_client):
     posted_record = test_post_new_record
     osti_id = posted_record.osti_id

-    elink_review_api_key = os.getenv("elink_review_api_token")
-    review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT")
-    elink_review_client = Elink(token=elink_review_api_key, target=review_endpoint)
-
     # Update an existing Record
-    elink_review_client.update_record(
-        osti_id,
-        MinimumDARecord(title="Test Updating Record - PyTest"),
-        "submit",
-    )  # works
+    try:
+        elink_review_client.update_record(
+            osti_id,
+            MinimumDARecord(title="Test Updating Record - PyTest"),
+            "submit",
+        )
+    except exceptions.ForbiddenException as fe:
+        core.delete_osti_record(elink_review_client, osti_id, "Completed Test")
+        pytest.fail(
+            f"Forbidden: Check API key or permissions associated with provided API key. {fe}"
+        )
+    except exceptions.BadRequestException as ve:
+        core.delete_osti_record(elink_review_client, osti_id, "Completed Test")
+        pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}")
+    except Exception as e:
+        core.delete_osti_record(elink_review_client, osti_id, "Completed Test")
+        pytest.fail(f"Unexpected error: {e}")

     # Get Revision based on revision number
-    elink_review_client.get_revision_by_number(osti_id, revision_number)  # works
+    try:
+        elink_review_client.get_revision_by_number(osti_id, revision_number)
+    except Exception:
+        core.delete_osti_record(elink_review_client, osti_id, "Completed Test")
+        pytest.fail(
+            f"Failed to get revision {revision_number} on record with OSTI ID: {osti_id}"
+        )

    # as of 8/7/2025, elinkapi 0.5.1, these get_all_revisions() calls have stopped working...
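+    # hedged sketch: if the breakage is confirmed to be on the service side,
+    # calling pytest.skip("get_all_revisions broken as of elinkapi 0.5.1") here
+    # would keep unrelated CI runs green instead of failing the whole test.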
- # elink_prod_client = Elink(token=os.getenv("elink_api_PRODUCTION_key")) - # print(elink_prod_client.get_all_revisions(1758063)) - # Get all RevisionHistory of a Record - # revision_history = elink_review_client.get_all_revisions(osti_id) # works - # revision_history[0] - # revision_history[-1] - - # # MEDIA ENDPOINTS - # # Associate new Media with a Record - # posted_media = elink_review_client.post_media(osti_id, file_path, {"title": "Title of the Media media_file.txt"}) - # posted_media3 = elink_review_client.post_media(osti_id, file_path3, {"title": "Title of the Media media_file.txt"}) - # media_id = posted_media.media_id - # # Replace existing Media on a Record - # replaced_media2 = elink_review_client.put_media(osti_id, media_id, file_path2, {"title": "Changed this title now"}) - # # Get Media associated with OSTI ID - # media = elink_review_client.get_media(osti_id) - # # Get Media content of a media resource - # media_content = elink_review_client.get_media_content(media_id) - # # Delete Media with media_id off of a Record - # isSuccessDelete = elink_review_client.delete_single_media(osti_id, media_id, reason) #works - # assert isSuccessDelete - # # Delete all Media associated with a Record - # isSuccessAllDelete = elink_review_client.delete_all_media(osti_id, reason) - # assert isSuccessAllDelete - - # # Should see that all media has been deleted - # final_media = elink_review_client.get_media(osti_id) - - # print("Finished") + try: + revision_history = elink_review_client.get_all_revisions(osti_id) # works + revision_history[0] + revision_history[-1] + except Exception: + core.delete_osti_record(elink_review_client, osti_id, "Completed Test") + pytest.fail( + f"Failed to get entire revision history of record with OSTI ID: {osti_id}" + ) + + core.delete_osti_record(elink_review_client, osti_id, "Completed Test") diff --git a/tests/test_core.py b/tests/test_core.py new file mode 100644 index 0000000..2f2be67 --- /dev/null +++ b/tests/test_core.py @@ -0,0 +1,123 @@ +import sys +import os + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) +import src.mp_cite.core as core + +import pytest + + +def test_update_existing_osti_record(elink_review_client): + record = core.submit_new_osti_record( + elink_review_client, + new_values={"title": "Test Update Existing OSTI Record | Pytest"}, + ) + osti_id = record.osti_id + date_old = record.date_metadata_updated + + try: + assert record.title == "Test Update Existing OSTI Record | Pytest" + assert record.workflow_status == "SO" + assert record.description is None + except Exception: + core.delete_osti_record( + elink_review_client, osti_id, "Unexpected submission..." + ) + pytest.fail("Failed submit new record as expected! 
Deleting test record...")
+
+    try:
+        # state = "save"
+        # patch = { "description": "This is a new robocrys description" }

+        # response = requests.patch(f"{elink_review_client.target}/records/{osti_id}/{state}",
+        #                 headers = {
+        #                     "Authorization" : f"Bearer {elink_review_client.token}",
+        #                     "Content-Type": "application/json"
+        #                 },
+        #                 data=json.dumps(patch))
+        # print("TEST TEST TEST RESPONSE: ", response, "\n", response.text)

+        # if response.status_code == 400:
+        #     core.delete_osti_record(elink_review_client, osti_id, "Test Failed!")
+        #     pytest.fail("Failed to patch!")

+        record = core.update_existing_osti_record(
+            elink_review_client,
+            osti_id,
+            {"description": "This is a new robocrys description"},
+            new_state="save",
+        )
+
+        # elink_review_client.patch_record(osti_id, { """description""": """This is a new robocrys description""" })
+
+        assert record.workflow_status == "SA"
+        assert record.description == "This is a new robocrys description"
+        assert record.date_metadata_added > date_old
+    except Exception:
+        core.delete_osti_record(elink_review_client, osti_id, "Test Failed!")
+        pytest.fail("Failed to update existing OSTI record! Deleting test record...")
+
+    core.delete_osti_record(elink_review_client, osti_id, "Test Completed.")
+
+
+def test_submit_new_osti_record(elink_review_client):
+    """
+    Submits a record and then retrieves it. Checks that each keyword-value pair still matches, since no updates/patches have been made.
+    """
+
+    record_submit = core.submit_new_osti_record(
+        elink_review_client,
+        new_values={"title": "Test Submit New OSTI Record | Pytest"},
+    )
+    osti_id = record_submit.osti_id
+
+    record_got = elink_review_client.get_single_record(osti_id)
+
+    for keyword, value in record_got:
+        if keyword == "workflow_status" or getattr(record_submit, keyword) == value:
+            # since the workflow_status of submitted OSTI records changes so quickly in the review environment, we cannot verify that one.
+            pass
+        else:
+            core.delete_osti_record(elink_review_client, osti_id, "Test Completed.")
+            pytest.fail(
+                f"The submitted record's {keyword} does not match the retrieved record's {keyword}: {getattr(record_submit, keyword)} != {value}"
+            )
+
+    core.delete_osti_record(elink_review_client, osti_id, "Test Completed.")
+
+
+def test_update_state_of_osti_record(elink_review_client):
+    record_submit = core.submit_new_osti_record(
+        elink_review_client,
+        new_values={"title": "SUBMIT ONLY Test Updated State OSTI Record | Pytest"},
+    )
+    osti_id = record_submit.osti_id
+
+    record_updated_save = core.update_state_of_osti_record(
+        elink_review_client, osti_id, "save"
+    )
+    try:
+        assert record_updated_save.workflow_status == "SA"
+        assert record_updated_save.revision == 2
+    except Exception:
+        core.delete_osti_record(
+            elink_review_client, osti_id, "Failed to change to saved."
+ ) + pytest.fail( + f"Failed to updated to save status: Workflow Status at Fail == {record_updated_save.workflow_status} and revision # == {record_updated_save.revision}" + ) + + record_updated_save = core.update_state_of_osti_record( + elink_review_client, osti_id, "submit" + ) + try: + assert record_updated_save.workflow_status == "SO" + assert record_updated_save.revision == 3 + except Exception: + # core.delete_osti_record(elink_review_client, osti_id, "Failed to change to submit.") + pytest.fail( + f"Failed to update to submit status: Workflow Status at Fail == {record_updated_save.workflow_status} and Revision # == {record_updated_save.revision}" + ) + # Need to ask about the desired functionality updating state to submit... + + core.delete_osti_record(elink_review_client, osti_id, "Test Completed.") diff --git a/tests/test_elink_api.py b/tests/test_elink_api.py new file mode 100644 index 0000000..ed55236 --- /dev/null +++ b/tests/test_elink_api.py @@ -0,0 +1,149 @@ +# import os +# from dotenv import load_dotenv + +# from elinkapi import Elink, Record, exceptions +# import pytest + +# from pymongo import MongoClient +# import pymongo + +# load_dotenv() + +# atlas_user = os.environ.get("atlas_user") +# atlas_password = os.environ.get("atlas_password") +# atlas_host = os.environ.get("atlas_host") +# mongo_uri = f"mongodb+srv://{atlas_user}:{atlas_password}@{atlas_host}/" + +# api = Elink(token=os.environ.get("elink_api_PRODUCTION_key")) # target default is production E-link service. + +# # ### Grabbing an existing record + +# # # record = api.get_single_record(mp-id) # test for silicon + +# # # type(record) + +# # # ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record) + +# # # print(ELinkGotRecordModel.get_title()) +# # # print(ELinkGotRecordModel.get_site_url()) +# # # print(ELinkGotRecordModel.get_keywords()) +# # # print(ELinkGotRecordModel.get_default_description()) + +# # # ELinkTestGetRecordModel = TestClass(**record.model_dump()) + +# # ### Making a new record + +# # # with MongoClient(mongo_uri) as client: +# # # #get all material_ids and dois from doi collection +# # # doi_collection = client["mp_core"]["dois"] +# # # materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10)) +# # # material_ids = [entry["material_id"] for entry in materials_to_update] + +# # # # check # of material_ids from DOI collection vs amount in robocrys + +# # # # get description for material_ids from robocrys collection +# # # coll = client["mp_core_blue"]["robocrys"] +# # # res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) + +# # # # join on material_id +# # # for doc in res: +# # # mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update)) +# # # doc["doi"] = mat["doi"] + + +# # # {"material_id": ..., "doi": ..., "description": ...} -> +# # # Record( +# # # template_fields ..., +# # # doi: ..., +# # # description: ..., +# # # fields_where_material_id_makes_sense: ..., +# # # ) + +# # # with the client open +# # with MongoClient(mongo_uri) as client: +# # # get all dois from the collection +# # doi_collection = client["mp_core"]["dois"] +# # materials_to_update = list( +# # doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2) +# # ) + +# # # from the doi collection, grab the material_id and doi of each material +# # material_ids = [entry["material_id"] for entry in materials_to_update] + +# # # additionally, gain the osti id 
from the doi +# # osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update] + +# # # additionally, grab the description of each material from the robocrys +# # coll = client["mp_core_blue"][ +# # "robocrys" +# # ] # grabs robocrys collection from active database +# # res = list( +# # coll.find( +# # {"material_id": {"$in": material_ids}}, +# # {"_id": 0, "material_id": 1, "description": 1}, +# # ) +# # ) # grabs the material id and description of entries in the collection +# # descriptions = [entry["description"] for entry in res] + +# # # for each material (and its material_id, doi, and osti_id) +# # for i in range(len(materials_to_update)): +# # internal_material_id = material_ids[i] +# # internal_osti_id = osti_ids[i] +# # internal_description = descriptions[i] + +# # # get_single_record(osti_id) +# # record = api.get_single_record(internal_osti_id) + +# # print( +# # f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************" +# # ) +# # print(record) + +# # with the client open +# with MongoClient(mongo_uri) as client: +# # get all dois from the collection +# doi_collection = client["mp_core"]["dois"] +# materials_to_update = list(doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2)) + +# # print( +# # f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" +# # ) +# # print(record) + +# # additionally, gain the osti id from the doi +# osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update] + +# # additionally, grab the description of each material from the robocrys +# coll = client["mp_core_blue"]["robocrys"] # grabs robocrys collection from active database +# res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) # grabs the material id and description of entries in the collection +# descriptions = [entry["description"] for entry in res] + +# # for each material (and its material_id, doi, and osti_id) +# for i in range(len(materials_to_update)): +# internal_material_id = material_ids[i] +# internal_osti_id = osti_ids[i] +# internal_description = descriptions[i] + +# # get_single_record(osti_id) +# record = api.get_single_record(internal_osti_id) + +# print(f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************") +# print(record) + +# if internal_material_id == record.site_unique_id: +# # update description +# record.description = "testTESTtestTESTtest" + +# print(f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") +# print(record) + +# # # post updated record +# # try: +# # saved_record = api.post_new_record(record, "save") +# # except exceptions.BadRequestException as ve: +# # ... +# # # ve.message = "Site Code AAAA is not valid." 
+# # # ve.errors provides more details: +# # # [{"status":"400", "detail":"Site Code AAAA is not valid.", "source":{"pointer":"site_ownership_code"}}] + + diff --git a/uv.lock b/uv.lock index 970822d..178fa79 100644 --- a/uv.lock +++ b/uv.lock @@ -118,7 +118,7 @@ wheels = [ [[package]] name = "elinkapi" -version = "0.5.1" +version = "0.5.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, @@ -126,9 +126,9 @@ dependencies = [ { name = "requests-toolbelt" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/77/11/aa584c66c16a417433a6ac51d232e4cf35a1b5c5a8a747193c73503c8b14/elinkapi-0.5.1.tar.gz", hash = "sha256:33e73648bcb5272e458215698219dcc1c09645f0726798883a2adcdc07f5e00e", size = 51606, upload-time = "2025-08-06T17:49:57.796Z" } +sdist = { url = "https://files.pythonhosted.org/packages/73/39/f652cd398619b6bd9f4a1fc8c9255202c130b36e8c87df1872100f5d744f/elinkapi-0.5.2.tar.gz", hash = "sha256:934134500721aba9e5e37aab232f3c7aa548c87a114ae71f0f0d1b27240dda02", size = 52202, upload-time = "2025-08-08T15:18:00.606Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/28/dec8dfc0a2ddd7ba16a90c29bb7c832f9323b5b2c6bb9699244601bdb289/elinkapi-0.5.1-py3-none-any.whl", hash = "sha256:0ab14ed05a5860480697dba860cb684b77cda042006212e597b5c5ec253df481", size = 37695, upload-time = "2025-08-06T17:49:56.434Z" }, + { url = "https://files.pythonhosted.org/packages/1f/da/27cb5c19f8971c18e9a2b2ebd8db56e8f01e718bf0c44bd7cb632574cb12/elinkapi-0.5.2-py3-none-any.whl", hash = "sha256:eb372efc3e6683a6c95e807f3450b51cfe55daff0c268081c606b975a35ee308", size = 37895, upload-time = "2025-08-08T15:17:59.299Z" }, ] [[package]] @@ -321,7 +321,7 @@ lint = [ [package.metadata] requires-dist = [ - { name = "elinkapi", specifier = ">=0.4.9" }, + { name = "elinkapi", specifier = ">=0.5.2" }, { name = "pydantic", specifier = ">=2.11.7" }, { name = "pymongo", specifier = ">=4.13.2" }, { name = "python-dotenv", specifier = ">=1.1.1" }, From 9c9a1981ee2211a5e93ffe27cb97643205c1159c Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Tue, 12 Aug 2025 15:13:55 -0700 Subject: [PATCH 29/65] Fixed some issue with importing conf_test prefixes, and also added a temporary debug test for updating states... --- tests/elinkapi_test.py | 1 + tests/test_core.py | 95 ++++++++++++++++++++++++++++++++++++++---- 2 files changed, 89 insertions(+), 7 deletions(-) diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py index 9253f97..410ac1f 100644 --- a/tests/elinkapi_test.py +++ b/tests/elinkapi_test.py @@ -15,6 +15,7 @@ ) # cannot find a good workaround for this with relative importing... 
import src.mp_cite.core as core + load_dotenv() valid_save_json = { diff --git a/tests/test_core.py b/tests/test_core.py index 2f2be67..ac093d2 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -4,6 +4,9 @@ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) import src.mp_cite.core as core + +from elinkapi import Organization, Person, Record + import pytest @@ -41,14 +44,16 @@ def test_update_existing_osti_record(elink_review_client): # core.delete_osti_record(elink_review_client, osti_id, "Test Failed!") # pytest.fail("Failed to patch!") - record = core.update_existing_osti_record( - elink_review_client, - osti_id, - {"description": "This is a new robocrys description"}, - new_state="save", - ) + # record = core.update_existing_osti_record( + # elink_review_client, + # osti_id, + # {"description": "This is a new robocrys description"}, + # new_state="save", + # ) - # elink_review_client.patch_record(osti_id, { """description""": """This is a new robocrys description""" }) + elink_review_client.patch_record( + osti_id, {"description": "This is a new description"} + ) assert record.workflow_status == "SA" assert record.description == "This is a new robocrys description" @@ -69,6 +74,7 @@ def test_submit_new_osti_record(elink_review_client): elink_review_client, new_values={"title": "Test Submit New OSTI Record | Pytest"}, ) + osti_id = record_submit.osti_id record_got = elink_review_client.get_single_record(osti_id) @@ -91,6 +97,7 @@ def test_update_state_of_osti_record(elink_review_client): elink_review_client, new_values={"title": "SUBMIT ONLY Test Updated State OSTI Record | Pytest"}, ) + osti_id = record_submit.osti_id record_updated_save = core.update_state_of_osti_record( @@ -121,3 +128,77 @@ def test_update_state_of_osti_record(elink_review_client): # Need to ask about the desired functionality updating state to submit... 
core.delete_osti_record(elink_review_client, osti_id, "Test Completed.") + + +def test_update_state_debug(elink_review_client): + my_record_dict = { + "product_type": "DA", + "title": "My Dataset", + "organizations": [ + Organization(type="RESEARCHING", name="LBNL Materials Project (LBNL-MP)"), + Organization( + type="SPONSOR", + name="TEST SPONSOR ORG", + identifiers=[{"type": "CN_DOE", "value": "AC02-05CH11231"}], + ), # sponsor org is necessary for submission + ], + "persons": [Person(type="AUTHOR", last_name="Persson")], + "site_ownership_code": "LBNL-MP", + "access_limitations": ["UNL"], + "publication_date": "2025-8-12", + "site_url": "https://next-gen.materialsproject.org/materials", + } + + my_record = Record(**my_record_dict) + + # save in post then update to submit + my_rr = elink_review_client.post_new_record(my_record, "save") + osti_id = my_rr.osti_id + print( + f'After post_new_record(my_record, "save"), my record response workflow_status is {my_rr.workflow_status}' + ) + print(f"Revision Number is {my_rr.revision}") + + got_record = elink_review_client.get_single_record(osti_id) + record_updated_state = elink_review_client.update_record( + osti_id, got_record, "submit" + ) + print( + f'After update_record(osti_id, got_record, "submit"), my record response workflow_status is {record_updated_state.workflow_status}' + ) + print(f"Revision Number is {record_updated_state.revision}\n") + + # submit in post then update to save + record_submit_first = elink_review_client.post_new_record(my_record, "submit") + osti_id = record_submit_first.osti_id + print( + f'Instead of saving, if I post_new_record(my_record, "submit") immediately, then my record response workflow status is {record_submit_first.workflow_status}' + ) + print(f"And revision number is {record_submit_first.revision}") + + got_submitted_record = elink_review_client.get_single_record(osti_id) + record_updated_state = elink_review_client.update_record( + osti_id, got_submitted_record, "submit" + ) + print( + f'After update_record(osti_id, got_record, "save"), my record response workflow_status is {record_updated_state.workflow_status}' + ) + print(f"And the revision number is {record_updated_state.revision}\n") + + # update the workflow_status manually? 
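+    # assumption drawn from the runs above, not from OSTI docs: "SA" shows up
+    # after a "save" and "SO" after a "submit"; the block below probes whether
+    # update_record honors a hand-set workflow_status.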
+ record_to_manual_update = elink_review_client.post_new_record(my_record, "save") + osti_id = record_to_manual_update.osti_id + print( + f'As expected, after post_new_record(my_record, "save"), the workflow status is {record_to_manual_update.workflow_status}' + ) + print(f"And the revision number is {record_to_manual_update.revision}") + + got_record_to_manual_update = elink_review_client.get_single_record(osti_id) + got_record_to_manual_update.workflow_status = "SO" + record_after_manual_update = elink_review_client.update_record( + osti_id, got_record_to_manual_update, "submit" + ) + print( + f'After update_record(osti_id, got_record_to_manual_update, "submit"), my record response workflow_status is {record_after_manual_update.workflow_status}' + ) + print(f"Revision Number is {record_after_manual_update.revision}\n") From a7cb018c835189432066697f65c2e9229afb70f8 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Tue, 12 Aug 2025 15:36:31 -0700 Subject: [PATCH 30/65] Fixed some issue with importing conf_test prefixes being blocked by pre-commit since they aren't being seen as accessed --- tests/elink_service_test.py | 2 ++ tests/elinkapi_test.py | 1 + tests/test_core.py | 17 +++++------------ 3 files changed, 8 insertions(+), 12 deletions(-) diff --git a/tests/elink_service_test.py b/tests/elink_service_test.py index 0a6515d..7de2c01 100644 --- a/tests/elink_service_test.py +++ b/tests/elink_service_test.py @@ -3,6 +3,8 @@ import pytest from dotenv import load_dotenv +from tests.conf_test import elink_review_client + load_dotenv() # TODO: Write tests that verify our usage of Elink is correct, diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py index 410ac1f..443b9a0 100644 --- a/tests/elinkapi_test.py +++ b/tests/elinkapi_test.py @@ -15,6 +15,7 @@ ) # cannot find a good workaround for this with relative importing... 
import src.mp_cite.core as core +from tests.conf_test import elink_review_client load_dotenv() diff --git a/tests/test_core.py b/tests/test_core.py index ac093d2..7587f43 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -4,6 +4,7 @@ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) import src.mp_cite.core as core +from tests.conf_test import elink_review_client from elinkapi import Organization, Person, Record @@ -154,18 +155,14 @@ def test_update_state_debug(elink_review_client): # save in post then update to submit my_rr = elink_review_client.post_new_record(my_record, "save") osti_id = my_rr.osti_id - print( - f'After post_new_record(my_record, "save"), my record response workflow_status is {my_rr.workflow_status}' - ) + print(f'After post_new_record(my_record, "save"), my record response workflow_status is {my_rr.workflow_status}') print(f"Revision Number is {my_rr.revision}") got_record = elink_review_client.get_single_record(osti_id) record_updated_state = elink_review_client.update_record( osti_id, got_record, "submit" ) - print( - f'After update_record(osti_id, got_record, "submit"), my record response workflow_status is {record_updated_state.workflow_status}' - ) + print(f'After update_record(osti_id, got_record, "submit"), my record response workflow_status is {record_updated_state.workflow_status}') print(f"Revision Number is {record_updated_state.revision}\n") # submit in post then update to save @@ -177,12 +174,8 @@ def test_update_state_debug(elink_review_client): print(f"And revision number is {record_submit_first.revision}") got_submitted_record = elink_review_client.get_single_record(osti_id) - record_updated_state = elink_review_client.update_record( - osti_id, got_submitted_record, "submit" - ) - print( - f'After update_record(osti_id, got_record, "save"), my record response workflow_status is {record_updated_state.workflow_status}' - ) + record_updated_state = elink_review_client.update_record(osti_id, got_submitted_record, "submit") + print(f'After update_record(osti_id, got_submitted_record, "save"), my record response workflow_status is {record_updated_state.workflow_status}') print(f"And the revision number is {record_updated_state.revision}\n") # update the workflow_status manually? From 97a611caa3c79b521ea8bba52657370762fd9f07 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Fri, 15 Aug 2025 16:28:46 -0700 Subject: [PATCH 31/65] Cleaning up for final merge --- src/mp_cite/core.py | 19 +++-- tests/elink_service_test.py | 1 - tests/test_elink_api.py | 149 ------------------------------------ 3 files changed, 12 insertions(+), 157 deletions(-) delete mode 100644 tests/test_elink_api.py diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index 08b0846..1898757 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -95,16 +95,21 @@ def update_existing_osti_record( Instead, we leave this for the user. """ - # record_on_elink = elinkapi.get_single_record(osti_id) + record_on_elink = elinkapi.get_single_record(osti_id) - # for keyword in new_values: - # setattr(record_on_elink, keyword, new_values[keyword]) + for keyword in new_values: + setattr(record_on_elink, keyword, new_values[keyword]) - # return elinkapi.update_record( - # osti_id, record_on_elink, state="save" - # ) # user should use update_state_of_osti_record to submit instead + if new_state == "submit": + # due to bug in elinkapi version<=0.5.2, new_state passing does not update workflow status. 
+ # for now, it is updated manually to 'SO' submitted to OSTI, which is should do itself. + record_on_elink.workflow_status = "SO" - elinkapi.patch_record(osti_id, new_values, new_state) + return elinkapi.update_record(osti_id, record_on_elink, state=new_state) + + # due to bug in elinkapi version<=0.5.2, elinkapi.patch_record fails due to bad casting of dict to str. + # when the next release fixes this, we can change it to this again below. + # elinkapi.patch_record(osti_id, new_values, new_state) def submit_new_osti_record( diff --git a/tests/elink_service_test.py b/tests/elink_service_test.py index 7de2c01..84f761d 100644 --- a/tests/elink_service_test.py +++ b/tests/elink_service_test.py @@ -3,7 +3,6 @@ import pytest from dotenv import load_dotenv -from tests.conf_test import elink_review_client load_dotenv() diff --git a/tests/test_elink_api.py b/tests/test_elink_api.py deleted file mode 100644 index ed55236..0000000 --- a/tests/test_elink_api.py +++ /dev/null @@ -1,149 +0,0 @@ -# import os -# from dotenv import load_dotenv - -# from elinkapi import Elink, Record, exceptions -# import pytest - -# from pymongo import MongoClient -# import pymongo - -# load_dotenv() - -# atlas_user = os.environ.get("atlas_user") -# atlas_password = os.environ.get("atlas_password") -# atlas_host = os.environ.get("atlas_host") -# mongo_uri = f"mongodb+srv://{atlas_user}:{atlas_password}@{atlas_host}/" - -# api = Elink(token=os.environ.get("elink_api_PRODUCTION_key")) # target default is production E-link service. - -# # ### Grabbing an existing record - -# # # record = api.get_single_record(mp-id) # test for silicon - -# # # type(record) - -# # # ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record) - -# # # print(ELinkGotRecordModel.get_title()) -# # # print(ELinkGotRecordModel.get_site_url()) -# # # print(ELinkGotRecordModel.get_keywords()) -# # # print(ELinkGotRecordModel.get_default_description()) - -# # # ELinkTestGetRecordModel = TestClass(**record.model_dump()) - -# # ### Making a new record - -# # # with MongoClient(mongo_uri) as client: -# # # #get all material_ids and dois from doi collection -# # # doi_collection = client["mp_core"]["dois"] -# # # materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10)) -# # # material_ids = [entry["material_id"] for entry in materials_to_update] - -# # # # check # of material_ids from DOI collection vs amount in robocrys - -# # # # get description for material_ids from robocrys collection -# # # coll = client["mp_core_blue"]["robocrys"] -# # # res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) - -# # # # join on material_id -# # # for doc in res: -# # # mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update)) -# # # doc["doi"] = mat["doi"] - - -# # # {"material_id": ..., "doi": ..., "description": ...} -> -# # # Record( -# # # template_fields ..., -# # # doi: ..., -# # # description: ..., -# # # fields_where_material_id_makes_sense: ..., -# # # ) - -# # # with the client open -# # with MongoClient(mongo_uri) as client: -# # # get all dois from the collection -# # doi_collection = client["mp_core"]["dois"] -# # materials_to_update = list( -# # doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2) -# # ) - -# # # from the doi collection, grab the material_id and doi of each material -# # material_ids = [entry["material_id"] for entry in materials_to_update] - -# # # additionally, gain the 
osti id from the doi -# # osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update] - -# # # additionally, grab the description of each material from the robocrys -# # coll = client["mp_core_blue"][ -# # "robocrys" -# # ] # grabs robocrys collection from active database -# # res = list( -# # coll.find( -# # {"material_id": {"$in": material_ids}}, -# # {"_id": 0, "material_id": 1, "description": 1}, -# # ) -# # ) # grabs the material id and description of entries in the collection -# # descriptions = [entry["description"] for entry in res] - -# # # for each material (and its material_id, doi, and osti_id) -# # for i in range(len(materials_to_update)): -# # internal_material_id = material_ids[i] -# # internal_osti_id = osti_ids[i] -# # internal_description = descriptions[i] - -# # # get_single_record(osti_id) -# # record = api.get_single_record(internal_osti_id) - -# # print( -# # f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************" -# # ) -# # print(record) - -# # with the client open -# with MongoClient(mongo_uri) as client: -# # get all dois from the collection -# doi_collection = client["mp_core"]["dois"] -# materials_to_update = list(doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2)) - -# # print( -# # f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" -# # ) -# # print(record) - -# # additionally, gain the osti id from the doi -# osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update] - -# # additionally, grab the description of each material from the robocrys -# coll = client["mp_core_blue"]["robocrys"] # grabs robocrys collection from active database -# res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) # grabs the material id and description of entries in the collection -# descriptions = [entry["description"] for entry in res] - -# # for each material (and its material_id, doi, and osti_id) -# for i in range(len(materials_to_update)): -# internal_material_id = material_ids[i] -# internal_osti_id = osti_ids[i] -# internal_description = descriptions[i] - -# # get_single_record(osti_id) -# record = api.get_single_record(internal_osti_id) - -# print(f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************") -# print(record) - -# if internal_material_id == record.site_unique_id: -# # update description -# record.description = "testTESTtestTESTtest" - -# print(f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") -# print(record) - -# # # post updated record -# # try: -# # saved_record = api.post_new_record(record, "save") -# # except exceptions.BadRequestException as ve: -# # ... -# # # ve.message = "Site Code AAAA is not valid." 
-# # # ve.errors provides more details: -# # # [{"status":"400", "detail":"Site Code AAAA is not valid.", "source":{"pointer":"site_ownership_code"}}] - - From 7202973eeb1c3407715321946892b884b97098b7 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Tue, 24 Jun 2025 17:09:32 -0700 Subject: [PATCH 32/65] preliminary version of ELinkGetResponseModel made, using ELinkAPI RecordResponse --- legacy/mpcite/models.py | 302 ++++------------------- legacy/mpcite/recordresponse_example.txt | 0 tests/test_elink_api.py | 59 +++++ 3 files changed, 105 insertions(+), 256 deletions(-) create mode 100644 legacy/mpcite/recordresponse_example.txt create mode 100644 tests/test_elink_api.py diff --git a/legacy/mpcite/models.py b/legacy/mpcite/models.py index b2fab65..e4d055f 100644 --- a/legacy/mpcite/models.py +++ b/legacy/mpcite/models.py @@ -1,101 +1,65 @@ -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, ConfigDict from typing import List, Dict, Optional -from datetime import datetime +import datetime from enum import Enum import bibtexparser - - -class ConnectionModel(BaseModel): - endpoint: str = Field(..., title="URL Endpoint of the connection") - username: str = Field(..., title="User Name") - password: str = Field(..., title="Password") - - -class RoboCrysModel(BaseModel): - material_id: str - last_updated: datetime - description: Optional[str] = None - error: Optional[str] = None - - @classmethod - def get_default_description(cls): - return ( - "Computed materials data using density " - "functional theory calculations. These calculations determine " - "the electronic structure of bulk materials by solving " - "approximations to the Schrodinger equation. For more " - "information, see https://materialsproject.org/docs/calculations" - ) - - -class MaterialModel(BaseModel): - last_updated: datetime = Field( - None, title="timestamp for the most recent calculation" - ) - updated_at: datetime = Field(None, title="alternative to last_updated") - created_at: datetime = Field( - None, - description="creation time for this material defined by when the first structure " - "optimization calculation was run", - ) - task_id: str = Field( - "", title="task id for this material. Also called the material id" - ) - # pretty_formula: str = Field(..., title="clean representation of the formula") - pretty_formula: str = Field(..., title="clean representation of the formula") - chemsys: str - +from elinkapi import Elink, Record +from elinkapi.record import RecordResponse, AccessLimitation, JournalType +from elinkapi.geolocation import Geolocation +from elinkapi.identifier import Identifier +from elinkapi.related_identifier import RelatedIdentifier +from elinkapi.person import Person +from elinkapi.organization import Organization + +class TestClass(RecordResponse): + ... + # stuff class ELinkGetResponseModel(BaseModel): - osti_id: Optional[str] = Field(...) + osti_id: Optional[int] = Field(...) dataset_type: str = Field(default="SM") title: str = Field(...) 
- creators: str = Field(default="Kristin Persson") # replace with authors + persons: List[Person] contributors: List[Dict[str, str]] = Field( default=[{"first_name": "Materials", "last_name": "Project"}], description="List of Dict of first name, last name mapping", ) # no contributor - product_nos: str = Field(..., title="MP id") - accession_num: str = Field(..., title="MP id") - contract_nos: str = Field("AC02-05CH11231; EDCBEE") - originating_research_org: str = Field( - default="Lawrence Berkeley National Laboratory (LBNL), Berkeley, CA (United States)" - ) - publication_date: str = Field(...) - language: str = Field(default="English") - country: str = Field(default="US") - sponsor_org: str = Field( - default="USDOE Office of Science (SC), Basic Energy Sciences (BES) (SC-22)" - ) + publication_date: datetime.date site_url: str = Field(...) - contact_name: str = Field(default="Kristin Persson") - contact_org: str = Field(default="LBNL") - contact_email: str = Field(default="feedback@materialsproject.org") - contact_phone: str = Field(default="+1(510)486-7218") - related_resource: str = Field("https://materialsproject.org/citing") - contributor_organizations: str = Field(default="MIT; UC Berkeley; Duke; U Louvain") - subject_categories_code: str = Field(default="36 MATERIALS SCIENCE") - keywords: str = Field(...) - description: str = Field(default="") doi: dict = Field( {}, title="DOI info", description="Mainly used during GET request" ) + mp_id: str | None = None + keywords: List[str] = None @classmethod - def get_title(cls, material: MaterialModel): - formula = material.pretty_formula + def from_elinkapi_record(cls, R): + gotResponse = ELinkGetResponseModel( + osti_id = R.osti_id, + title = R.title, + persons = R.persons, + # assume default contributors for now, creators vs contributors? + publication_date = R.publication_date, + site_url = R.site_url, + doi = {"doi": R.doi}, + mp_id = next((id.value for id in R.identifiers if id.type == 'RN'), None), + keywords = R.keywords + ) + + return gotResponse + + def get_title(self): + formula = self.keywords[1] return "Materials Data on %s by Materials Project" % formula - @classmethod - def get_site_url(cls, mp_id): - return "https://materialsproject.org/materials/%s" % mp_id + def get_site_url(self): + return "https://materialsproject.org/materials/%s" % self.mp_id - @classmethod - def get_keywords(cls, material): - keywords = "; ".join( - ["crystal structure", material.pretty_formula, material.chemsys] - ) - return keywords + def get_keywords(self): + # keywords = "; ".join( + # ["crystal structure", material.pretty_formula, material.chemsys] + # ) + return self.keywords @classmethod def get_default_description(cls): @@ -113,11 +77,11 @@ def custom_to_dict(cls, elink_record) -> dict: return elink_record.dict(exclude={"osti_id", "doi"}) else: return elink_record.dict(exclude={"doi"}) - + class ElinkResponseStatusEnum(Enum): - SUCCESS = "SUCCESS" - FAILED = "FAILURE" + SUCCESS = "SUCCESS" + FAILED = "FAILURE" class ELinkPostResponseModel(BaseModel): @@ -142,178 +106,4 @@ def generate_doi_record(self): ) doi_collection_record.set_status(status=self.doi["@status"]) doi_collection_record.last_validated_on = datetime.now() - return doi_collection_record - - -class DOIRecordStatusEnum(str, Enum): - COMPLETED = "COMPLETED" - PENDING = "PENDING" - FAILURE = "FAILURE" - INIT = "INIT" - - -class DOIRecordModel(BaseModel): - material_id: str = Field(...) 
- doi: str = Field(default="") - bibtex: Optional[str] = None - status: DOIRecordStatusEnum - valid: bool = Field(False) - last_updated: datetime = Field( - default=datetime.now(), - title="DOI last updated time.", - description="Last updated is defined as either a Bibtex or status change.", - ) - created_at: datetime = Field( - default=datetime.now(), - title="DOI Created At", - description="creation time for this DOI record", - ) - last_validated_on: datetime = Field( - default=datetime.now(), - title="Date Last Validated", - description="Date that this data is last validated, " "not necessarily updated", - ) - elsevier_updated_on: datetime = Field( - default=datetime.now(), - title="Date Elsevier is updated", - description="If None, means never uploaded to elsevier", - ) - error: Optional[str] = Field( - default=None, description="None if no error, else error message" - ) - - class Config: - use_enum_values = True - - def set_status(self, status): - self.status = status - - def get_osti_id(self): - if self.doi is None or self.doi == "": - return "" - else: - return self.doi.split("/")[-1] - - def get_bibtex_abstract(self): - try: - if self.bibtex is None: - return "" - bib_db: bibtexparser.bibdatabase.BibDatabase = bibtexparser.loads( - self.bibtex - ) - if bib_db.entries: - return bib_db.entries[0]["abstractnote"] - except Exception as e: - print(e) - return "" - - -class OSTIDOIRecordModel(DOIRecordModel): - material_id: str = Field(...) - doi: str = Field(default="") - bibtex: Optional[str] = None - valid: bool = Field(False) - last_updated: datetime = Field( - default=datetime.now(), - title="DOI last updated time.", - description="Last updated is defined as either a Bibtex or status change.", - ) - - -class ElsevierPOSTContainerModel(BaseModel): - identifier: str = Field(default="", title="mp_id") - source: str = "MATERIALS_PROJECT" - date: str = datetime.now().date().isoformat().__str__() - title: str - description: str = "" - doi: str - authors: List[str] = ["Kristin Persson"] - url: str - type: str = "dataset" - dateAvailable: str = datetime.now().date().isoformat().__str__() - dateCreated: str = datetime.now().date().isoformat().__str__() - version: str = "1.0.0" - funding: str = "USDOE Office of Science (SC), Basic Energy Sciences (BES) (SC-22)" - language: str = "en" - method: str = "Materials Project" - accessRights: str = "Public" - contact: str = "Kristin Persson " - dataStandard: str = "https://materialsproject.org/citing" - howToCite: str = "https://materialsproject.org/citing" - subjectAreas: List[str] = ["36 MATERIALS SCIENCE"] - keywords: List[str] - institutions: List[str] = ["Lawrence Berkeley National Laboratory"] - institutionIds: List[str] = ["AC02-05CH11231; EDCBEE"] - spatialCoverage: List[str] = [] - temporalCoverage: List[str] = [] - references: List[str] = ["https://materialsproject.org/citing"] - relatedResources: List[str] = ["https://materialsproject.org/citing"] - location: str = "1 Cyclotron Rd, Berkeley, CA 94720" - childContainerIds: List[str] = [] - - @classmethod - def get_url(cls, mp_id): - return "https://materialsproject.org/materials/%s" % mp_id - - @classmethod - def get_keywords(cls, material: MaterialModel): - return ["crystal structure", material.pretty_formula, material.chemsys] - - @classmethod - def get_default_description(cls): - return ( - "Computed materials data using density " - "functional theory calculations. 
These calculations determine " - "the electronic structure of bulk materials by solving " - "approximations to the Schrodinger equation. For more " - "information, see https://materialsproject.org/docs/calculations" - ) - - @classmethod - def get_date_created(cls, material: MaterialModel) -> str: - return material.created_at.date().__str__() - - @classmethod - def get_date_available(cls, material: MaterialModel) -> str: - return material.created_at.date().__str__() - - @classmethod - def get_title(cls, material: MaterialModel) -> str: - return material.pretty_formula - - @classmethod - def from_material_model(cls, material: MaterialModel, doi: str, description: str): - model = ElsevierPOSTContainerModel( - identifier=material.task_id, - title=material.pretty_formula, - doi=doi, - url="https://materialsproject.org/materials/%s" % material.task_id, - keywords=["crystal structure", material.pretty_formula, material.chemsys], - date=datetime.now().date().__str__(), - dateCreated=material.created_at.date().__str__(), - dateAvailable=ElsevierPOSTContainerModel.get_date_available(material), - description=description, - ) - return model - - -class ExplorerGetJSONResponseModel(BaseModel): - osti_id: str - title: str - report_number: str - doi: str - product_type: str - language: str - country_publication: str - description: str - site_ownership_code: str - publication_date: str - entry_date: str - contributing_organizations: str - authors: List[str] - subjects: List[str] - contributing_org: str - doe_contract_number: str - sponsor_orgs: List[str] - research_orgs: List[str] - links: List[Dict[str, str]] + return doi_collection_record \ No newline at end of file diff --git a/legacy/mpcite/recordresponse_example.txt b/legacy/mpcite/recordresponse_example.txt new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_elink_api.py b/tests/test_elink_api.py new file mode 100644 index 0000000..d1ade7d --- /dev/null +++ b/tests/test_elink_api.py @@ -0,0 +1,59 @@ +import os +from dotenv import load_dotenv + +from elinkapi import Elink, Record, exceptions +import pytest +from mpcite.models import ELinkGetResponseModel, TestClass + +from pymongo import MongoClient + + +load_dotenv() + +atlas_user = os.environ.get("atlas_user") +atlas_password = os.environ.get("atlas_password") +atlas_host = os.environ.get("atlas_host") +mongo_uri = f"mongodb+srv://{atlas_user}:{atlas_password}@{atlas_host}/" + +api = Elink(token=os.environ.get("elink_api_key")) # target default is production E-link service. 
+
+record = api.get_single_record(1190959)
+type(record)
+
+ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record)
+
+print(ELinkGotRecordModel.get_title())
+print(ELinkGotRecordModel.get_site_url())
+print(ELinkGotRecordModel.get_keywords())
+print(ELinkGotRecordModel.get_default_description())
+
+
+
+ELinkTestGetRecordModel = TestClass(**record.model_dump())
+
+with MongoClient(mongo_uri) as client:
+    # get all material_ids and dois from doi collection
+    doi_collection = client["mp_core"]["dois"]
+    materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10))
+    material_ids = [entry["material_id"] for entry in materials_to_update]
+
+    # check # of material_ids from DOI collection vs amount in robocrys
+
+    # get description for material_ids from robocrys collection
+    coll = client["mp_core_blue"]["robocrys"]
+    res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1}))
+
+    # join on material_id
+    for doc in res:
+        mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update))
+        doc["doi"] = mat["doi"]
+
+
+# {"material_id": ..., "doi": ..., "description": ...} ->
+# Record(
+#     template_fields ...,
+#     doi: ...,
+#     description: ...,
+#     fields_where_material_id_makes_sense: ...,
+# )
+

From 9997f42e7059b118056e6fcac9f51871209df6dd Mon Sep 17 00:00:00 2001
From: HugoOnghai
Date: Thu, 26 Jun 2025 18:12:34 -0700
Subject: [PATCH 33/65] queried all desired data entries (stored as batched
 JSON files) on ELink; found a bug with rows greater than 100 in ElinkAPI
 query_records (144845 DOIs under 10.17188; 12 are not titled "Materials Data
 on ..." (edge cases), so 144833 materials have DOIs)

---
 tests/manage_backfills.py |  49 ++++++++++++++++
 tests/outputs.txt         |  46 +++++++++++++++
 tests/prod_to_review.py   | 120 ++++++++++++++++++++++++++++++++++++++
 tests/test_elink_api.py   |  96 ++++++++++++++++++++++--------
 4 files changed, 288 insertions(+), 23 deletions(-)
 create mode 100644 tests/manage_backfills.py
 create mode 100644 tests/outputs.txt
 create mode 100644 tests/prod_to_review.py

diff --git a/tests/manage_backfills.py b/tests/manage_backfills.py
new file mode 100644
index 0000000..a835456
--- /dev/null
+++ b/tests/manage_backfills.py
@@ -0,0 +1,49 @@
+# This script checks how many documents on ELink (i.e. ones with a DOI) are not accounted for in the internal DOI collection.
+
+from elinkapi import Elink, Query, Record
+
+import os
+from dotenv import load_dotenv
+
+load_dotenv()  # depends on the root directory from which you run your python scripts.
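+# a working-directory-independent variant (sketch; assumes the .env file sits
+# next to this script, and uses python-dotenv's explicit dotenv_path argument):
+# load_dotenv(dotenv_path=os.path.join(os.path.dirname(__file__), ".env"))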
+ +api = Elink(token = os.environ.get("elink_api_PRODUCTION_key")) + + +query1 = api.query_records(rows=1000) + +materials_with_dois : list[Record] = [] + +for page in query1: + print(f"Now on Page: {page.title}") + print(f"Material_ID: {page.site_unique_id} and DOI: http://doi.org/{page.doi}") + + if page.site_unique_id.startswith("mp-"): + materials_with_dois.append(page) + + # for record in page.data: + # if record.site_unique_id.startswith("mp-"): + # materials_with_dois.append(record) + + + +# set_q1 = [page for page in query1] +# set_q2 = [page for page in query2] + +# set_diffq1q2 = set(set_q1) - set(set_q2) +# print (f"Difference matched {len(set)} records") + +# filtered = [ +# page for page in query1 +# if page.title.lower().startswith("materials data on") +# ] + +# print (f"Filtered Query1 has {len(filtered)} records") + +# paginate through ALL results +# for page in query1: +# print(page.title) +# print(f"Material_ID: {page.site_unique_id} and DOI: http://doi.org/{page.doi}") + +# for record in page.data: +# print (f"OSTI ID: {record.osti_id} Title: {record.title}") \ No newline at end of file diff --git a/tests/outputs.txt b/tests/outputs.txt new file mode 100644 index 0000000..8d188e7 --- /dev/null +++ b/tests/outputs.txt @@ -0,0 +1,46 @@ +(mpcite-env) C:\Users\ongha\OneDrive\Documents\GitHub\MPCite>C:/Users/ongha/anaconda3/envs/mpcite-env/python.exe c:/Users/ongha/OneDrive/Documents/GitHub/MPCite/tests/prod_to_review.py + +Query retrieved 144845 record(s) +Page finished. Now at 500 data entries. 0 edge cases found. +Page finished. Now at 1000 data entries. 0 edge cases found. +Page finished. Now at 1500 data entries. 0 edge cases found. +Page finished. Now at 2000 data entries. 0 edge cases found. +Page finished. Now at 2500 data entries. 0 edge cases found. +Page finished. Now at 3000 data entries. 0 edge cases found. +Page finished. Now at 3500 data entries. 0 edge cases found. +Page finished. Now at 4000 data entries. 0 edge cases found. +Page finished. Now at 4500 data entries. 0 edge cases found. +Page finished. Now at 5000 data entries. 0 edge cases found. +Page finished. Now at 5500 data entries. 0 edge cases found. +Page finished. Now at 6000 data entries. 0 edge cases found. +Page finished. Now at 6500 data entries. 0 edge cases found. +Page finished. Now at 7000 data entries. 0 edge cases found. +Page finished. Now at 7500 data entries. 0 edge cases found. +Page finished. Now at 8000 data entries. 0 edge cases found. +Page finished. Now at 8500 data entries. 0 edge cases found. +Page finished. Now at 9000 data entries. 0 edge cases found. +Page finished. Now at 9500 data entries. 0 edge cases found. +Page finished. Now at 10000 data entries. 0 edge cases found. +Page finished. Now at 10500 data entries. 0 edge cases found. +Page finished. Now at 11000 data entries. 0 edge cases found. +Page finished. Now at 11500 data entries. 0 edge cases found. +Page finished. Now at 12000 data entries. 0 edge cases found. +Page finished. Now at 12500 data entries. 0 edge cases found. +Page finished. Now at 13000 data entries. 0 edge cases found. +Page finished. Now at 13500 data entries. 0 edge cases found. +Page finished. Now at 14000 data entries. 0 edge cases found. +Page finished. Now at 14500 data entries. 0 edge cases found. 
+
+Traceback (most recent call last):
+  File "C:\Users\ongha\anaconda3\envs\mpcite-env\Lib\site-packages\elinkapi\query.py", line 95, in __next__
+    record = self.data.pop()
+IndexError: pop from empty list
+
+During handling of the above exception, another exception occurred:
+
+Traceback (most recent call last):
+  File "c:\Users\ongha\OneDrive\Documents\GitHub\MPCite\tests\prod_to_review.py", line 29, in <module>
+    record = next(query)
+  File "C:\Users\ongha\anaconda3\envs\mpcite-env\Lib\site-packages\elinkapi\query.py", line 108, in __next__
+    raise StopIteration
+StopIteration
\ No newline at end of file
diff --git a/tests/prod_to_review.py b/tests/prod_to_review.py
new file mode 100644
index 0000000..87e311d
--- /dev/null
+++ b/tests/prod_to_review.py
@@ -0,0 +1,120 @@
+from elinkapi import Elink, Query, Record
+
+import os
+from dotenv import load_dotenv
+
+import json
+
+load_dotenv()  # .env discovery depends on the directory from which you run the script.
+
+review_endpoint = "https://review.osti.gov/elink2api/"
+
+prod_api = Elink(token = os.environ.get("elink_api_PRODUCTION_key"))
+review_api = Elink(token = os.environ.get("elink_review_api_token"), target=review_endpoint)
+
+print(prod_api.query_records())
+
+rows_per_page = 100
+
+# query production
+query = prod_api.query_records(rows=rows_per_page)
+print(f"Query retrieved {query.total_rows} record(s)")
+
+count_materials_data = 0
+count_MaterialsDataOn = 0
+cwd = os.getcwd()
+page_number = 0
+page_json_list = []
+
+for record in query:
+    # increment counter
+    count_materials_data = count_materials_data + 1
+    print(f"On record #{count_materials_data}, next url is {query.next_url}, previous url is {query.previous_url}")
+
+    # check whether this is a "Materials Data on" record
+    if record.title.startswith("Materials Data on"):
+        # increment the MaterialsDataOn counter
+        count_MaterialsDataOn = count_MaterialsDataOn + 1
+
+        # prepare the new record for the review environment: remove the OSTI ID and add its model_dump to the list of json objects for the page
+        new_record = record
+        new_record_dict = new_record.model_dump(exclude_none=True)
+
+        new_record_osti_id = new_record_dict.pop("osti_id") # now new_record_dict does not have the osti_id key
+        js = json.dumps(new_record_dict, default=str) # datetime objects are not JSON serializable, so default=str converts them to strings
+
+        page_json_list.append(js)
+
+        # TODO: take the new_record_dict and make it into a new post to the review environment and save the RecordResponse.
+
+    else:
+        print(f"Found edge case: {record.title}")
+
+    if count_materials_data % rows_per_page == 0:
+        # create/open, write, and close a new json file
+        page_number = count_materials_data // rows_per_page  # integer division keeps the page index (and filename) an integer
+        path = f'/json_pages/page_number_{page_number}'
+        fp = open(cwd+path, 'a')
+
+        for js in page_json_list:
+            fp.write(js)
+            fp.write("\n")
+
+        fp.close()
+        page_json_list = []
+
+        print(f"Page {page_number} finished. Now at {count_materials_data} data entries. {count_materials_data - count_MaterialsDataOn} edge cases found.")
+
+# after the loop exits, write out any remaining records that did not fill a full page
+page_number = page_number + 1
+path = f'/json_pages/page_number_{page_number}'
+fp = open(cwd+path, 'a')
+for js in page_json_list:
+    fp.write(js)
+    fp.write("\n")
+fp.close()
+
+# # if the record is a "Materials Data on" record, then add it to the batch
+# while count_materials_data < query.total_rows:
+
+#     # print(f"The length of the query is now {len(query.data)}")
+#     record = next(query)
+#     count_materials_data = count_materials_data + 1
+
+#     if record.title.startswith("Materials Data on"):
+#         count_MaterialsDataOn = count_MaterialsDataOn + 1
+
+#         new_record = record
+#         new_record_dict = new_record.model_dump(exclude_none=True)
+
+#         new_record_osti_id = new_record_dict.pop("osti_id")
+
+#         page_dict[f"Entry OSTI_ID {new_record_osti_id}"] = new_record_dict
+
+#         # TODO: take the new_record_dict and make it into a new post to the review environment and save the RecordResponse.
+
+
+
+#     if count_materials_data % rows_per_page == 0:
+#         # if a page has been fully consumed, then write the new batched dictionary to a json file
+
+#         js = json.dumps(page_dict, default=str)
+
+#         # open a new json file (it will be created if it does not exist)
+#         cwd = os.getcwd()
+#         path = f'/json_pages/page_number_{count_materials_data/rows_per_page}'
+#         fp = open(cwd+path, 'a')
+
+#         # write to json file
+#         fp.write(js)
+
+#         # close the connection to the file and empty the dict
+#         fp.close()
+#         page_dict = {}
+
+#     print(f"Page {(count_materials_data / rows_per_page)} finished. Now at {count_materials_data} data entries. {count_materials_data - count_MaterialsDataOn} edge cases found.")
+
+# model_dump exclude_none=True, remove null keys
+# pop osti_id --> save batch to json files
+# make new record
+# post to review_api
diff --git a/tests/test_elink_api.py b/tests/test_elink_api.py
index d1ade7d..80afba7 100644
--- a/tests/test_elink_api.py
+++ b/tests/test_elink_api.py
@@ -6,7 +6,7 @@
 from mpcite.models import ELinkGetResponseModel, TestClass
 from pymongo import MongoClient
 
-
+import pymongo
 load_dotenv()
 
@@ -15,38 +15,41 @@
 atlas_host = os.environ.get("atlas_host")
 mongo_uri = f"mongodb+srv://{atlas_user}:{atlas_password}@{atlas_host}/"
 
-api = Elink(token=os.environ.get("elink_api_key")) # target default is production E-link service.
+api = Elink(token=os.environ.get("elink_api_PRODUCTION_key")) # target default is production E-link service.
-record = api.get_single_record(1190959)
-type(record)
+### Grabbing an existing record
 
-ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record)
+# record = api.get_single_record(mp-id) # test for silicon
 
-print(ELinkGotRecordModel.get_title())
-print(ELinkGotRecordModel.get_site_url())
-print(ELinkGotRecordModel.get_keywords())
-print(ELinkGotRecordModel.get_default_description())
+# type(record)
 
+# ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record)
 
+# print(ELinkGotRecordModel.get_title())
+# print(ELinkGotRecordModel.get_site_url())
+# print(ELinkGotRecordModel.get_keywords())
+# print(ELinkGotRecordModel.get_default_description())
 
-ELinkTestGetRecordModel = TestClass(**record.model_dump())
+# ELinkTestGetRecordModel = TestClass(**record.model_dump())
 
-with MongoClient(mongo_uri) as client:
-    # get all material_ids and dois from doi collection
-    doi_collection = client["mp_core"]["dois"]
-    materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10))
-    material_ids = [entry["material_id"] for entry in materials_to_update]
+### Making a new record
+
+# with MongoClient(mongo_uri) as client:
+#     # get all material_ids and dois from doi collection
+#     doi_collection = client["mp_core"]["dois"]
+#     materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10))
+#     material_ids = [entry["material_id"] for entry in materials_to_update]
 
-    # check # of material_ids from DOI collection vs amount in robocrys
+#     # check # of material_ids from DOI collection vs amount in robocrys
 
-    # get description for material_ids from robocrys collection
-    coll = client["mp_core_blue"]["robocrys"]
-    res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1}))
+#     # get description for material_ids from robocrys collection
+#     coll = client["mp_core_blue"]["robocrys"]
+#     res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1}))
 
-    # join on material_id
-    for doc in res:
-        mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update))
-        doc["doi"] = mat["doi"]
+#     # join on material_id
+#     for doc in res:
+#         mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update))
+#         doc["doi"] = mat["doi"]
 
 
 # {"material_id": ..., "doi": ..., "description": ...} ->
@@ -57,3 +60,50 @@
 #     fields_where_material_id_makes_sense: ...,
 # )
 
+# with the client open
+with MongoClient(mongo_uri) as client:
+    # get all dois from the collection
+    doi_collection = client["mp_core"]["dois"]
+    materials_to_update = list(doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2))
+
+    # from the doi collection, grab the material_id and doi of each material
+    material_ids = [entry["material_id"] for entry in materials_to_update]
+
+    # additionally, obtain the osti id from the doi
+    osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update]
+
+    # additionally, grab the description of each material from the robocrys collection
+    coll = client["mp_core_blue"]["robocrys"] # grabs robocrys collection from active database
+    res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) # grabs the material id and description of entries in the collection
+    # key descriptions by material_id: Mongo's $in does not preserve the order of
+    # material_ids, so positional pairing with the lists above would be unreliable
+    descriptions = {entry["material_id"]: entry["description"] for entry in res}
+
+    # for each material (and its material_id, doi, and osti_id)
+    for i in range(len(materials_to_update)):
+        internal_material_id = material_ids[i]
+        internal_osti_id = osti_ids[i]
+        internal_description = descriptions.get(internal_material_id)
+
+        # get_single_record(osti_id)
+        record = api.get_single_record(internal_osti_id)
+
+        print(f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************")
+        print(record)
+
+        if internal_material_id == record.site_unique_id:
+            # update description
+            record.description = "testTESTtestTESTtest"
+
+            print(f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
+            print(record)
+
+        # # post updated record
+        # try:
+        #     saved_record = api.post_new_record(record, "save")
+        # except exceptions.BadRequestException as ve:
+        #     ...
+        #     # ve.message = "Site Code AAAA is not valid."
+        #     # ve.errors provides more details:
+        #     # [{"status":"400", "detail":"Site Code AAAA is not valid.", "source":{"pointer":"site_ownership_code"}}]
+
+
+

From eafcae89d762ba3467100cf743b90111e9fa23ac Mon Sep 17 00:00:00 2001
From: HugoOnghai <99376417+HugoOnghai@users.noreply.github.com>
Date: Fri, 11 Jul 2025 17:01:29 -0700
Subject: [PATCH 34/65] Merged upstream (#1)

* move old code to 'legacy'

* setup project using uv

* add license

* testing skeleton

* gh actions skeleton

* remove old reqs file to prevent dependabot alerts

---------

Co-authored-by: Tyler Mathis <35553152+tsmathis@users.noreply.github.com>
---
 .github/workflows/lint.yml | 4 ++
 .github/workflows/release.yml | 8 +++
 .github/workflows/testing.yml | 5 ++
 legacy/mpcite/models.py | 109 --------------------------------
 src/mp_cite/core.py | 1 -
 src/mp_cite/models.py | 112 +++++++++++++++++++++++++++++++++
 src/mp_cite/send_collection.py | 79 +++++++++++++++++++++++
 uv.lock | 90 +++++++++++++-------------
 8 files changed, 253 insertions(+), 155 deletions(-)
 delete mode 100644 legacy/mpcite/models.py
 create mode 100644 src/mp_cite/send_collection.py

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 1a76e83..c854112 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -5,6 +5,7 @@ on:
     branches: [master]
   pull_request:
     branches: [master]
+<<<<<<< HEAD
  workflow_dispatch:
 
 jobs:
@@ -21,3 +22,6 @@ jobs:
 
     - name: Analyzing the code with ruff
       run: uvx ruff check --output-format=github
+=======
+# TODO: setup linting with uv/ruff
+>>>>>>> 52382ff (Merged upstream (#1))
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index e5d2a28..6f3c42d 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,6 +1,7 @@
 name: release
 
 on:
+<<<<<<< HEAD
   release:
     types: [published]
 
@@ -70,3 +71,10 @@ jobs:
 
     - name: Build and Deploy!
run: uvx mkdocs gh-deploy +======= + push: + branches: [master] + pull_request: + branches: [master] +# TODO: setup release to pypi +>>>>>>> 52382ff (Merged upstream (#1)) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 67e0f21..9f71a9e 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -17,6 +17,11 @@ jobs: runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 +<<<<<<< HEAD +======= + with: + fetch-depth: 0 +>>>>>>> 52382ff (Merged upstream (#1)) - name: Install uv uses: astral-sh/setup-uv@v6 diff --git a/legacy/mpcite/models.py b/legacy/mpcite/models.py deleted file mode 100644 index e4d055f..0000000 --- a/legacy/mpcite/models.py +++ /dev/null @@ -1,109 +0,0 @@ -from pydantic import BaseModel, Field, ConfigDict -from typing import List, Dict, Optional -import datetime -from enum import Enum -import bibtexparser -from elinkapi import Elink, Record -from elinkapi.record import RecordResponse, AccessLimitation, JournalType -from elinkapi.geolocation import Geolocation -from elinkapi.identifier import Identifier -from elinkapi.related_identifier import RelatedIdentifier -from elinkapi.person import Person -from elinkapi.organization import Organization - -class TestClass(RecordResponse): - ... - # stuff - -class ELinkGetResponseModel(BaseModel): - osti_id: Optional[int] = Field(...) - dataset_type: str = Field(default="SM") - title: str = Field(...) - persons: List[Person] - contributors: List[Dict[str, str]] = Field( - default=[{"first_name": "Materials", "last_name": "Project"}], - description="List of Dict of first name, last name mapping", - ) # no contributor - publication_date: datetime.date - site_url: str = Field(...) - doi: dict = Field( - {}, title="DOI info", description="Mainly used during GET request" - ) - mp_id: str | None = None - keywords: List[str] = None - - @classmethod - def from_elinkapi_record(cls, R): - gotResponse = ELinkGetResponseModel( - osti_id = R.osti_id, - title = R.title, - persons = R.persons, - # assume default contributors for now, creators vs contributors? - publication_date = R.publication_date, - site_url = R.site_url, - doi = {"doi": R.doi}, - mp_id = next((id.value for id in R.identifiers if id.type == 'RN'), None), - keywords = R.keywords - ) - - return gotResponse - - def get_title(self): - formula = self.keywords[1] - return "Materials Data on %s by Materials Project" % formula - - def get_site_url(self): - return "https://materialsproject.org/materials/%s" % self.mp_id - - def get_keywords(self): - # keywords = "; ".join( - # ["crystal structure", material.pretty_formula, material.chemsys] - # ) - return self.keywords - - @classmethod - def get_default_description(cls): - return ( - "Computed materials data using density " - "functional theory calculations. These calculations determine " - "the electronic structure of bulk materials by solving " - "approximations to the Schrodinger equation. 
For more " - "information, see https://materialsproject.org/docs/calculations" - ) - - @classmethod - def custom_to_dict(cls, elink_record) -> dict: - if elink_record.osti_id is None or elink_record.osti_id == "": - return elink_record.dict(exclude={"osti_id", "doi"}) - else: - return elink_record.dict(exclude={"doi"}) - - -class ElinkResponseStatusEnum(Enum): - SUCCESS = "SUCCESS" - FAILED = "FAILURE" - - -class ELinkPostResponseModel(BaseModel): - osti_id: str - accession_num: str - product_nos: str - title: str - contract_nos: str - other_identifying_nos: Optional[str] - doi: Dict[str, str] - status: ElinkResponseStatusEnum - status_message: Optional[str] - - def generate_doi_record(self): - doi_collection_record = DOIRecordModel( - material_id=self.accession_num, - doi=self.doi["#text"], - status=self.doi["@status"], - bibtex=None, - valid=True, - last_validated_on=datetime.now(), - ) - doi_collection_record.set_status(status=self.doi["@status"]) - doi_collection_record.last_validated_on = datetime.now() - return doi_collection_record \ No newline at end of file diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index 6d6790b..db0de7a 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -10,7 +10,6 @@ OstiID: TypeAlias = int - def find_out_of_date_doi_entries( rc_client: MongoClient, doi_client: MongoClient, diff --git a/src/mp_cite/models.py b/src/mp_cite/models.py index 8bbaf74..71b4c9a 100644 --- a/src/mp_cite/models.py +++ b/src/mp_cite/models.py @@ -1,3 +1,4 @@ +<<<<<<< HEAD from pydantic import BaseModel, Field, model_validator from datetime import datetime @@ -82,3 +83,114 @@ class MinimumDARecord(Record): default_factory=lambda: datetime.now(tz=pytz.UTC) ) site_url: str = Field(default="https://next-gen.materialsproject.org/materials") +======= +from pydantic import BaseModel, Field, ConfigDict +from typing import List, Dict, Optional +import datetime +from enum import Enum +import bibtexparser +from elinkapi import Elink, Record +from elinkapi.record import RecordResponse, AccessLimitation, JournalType +from elinkapi.geolocation import Geolocation +from elinkapi.identifier import Identifier +from elinkapi.related_identifier import RelatedIdentifier +from elinkapi.person import Person +from elinkapi.organization import Organization + +class TestClass(RecordResponse): + ... + # stuff + +class ELinkGetResponseModel(BaseModel): + osti_id: Optional[int] = Field(...) + dataset_type: str = Field(default="SM") + title: str = Field(...) + persons: List[Person] + contributors: List[Dict[str, str]] = Field( + default=[{"first_name": "Materials", "last_name": "Project"}], + description="List of Dict of first name, last name mapping", + ) # no contributor + publication_date: datetime.date + site_url: str = Field(...) + doi: dict = Field( + {}, title="DOI info", description="Mainly used during GET request" + ) + mp_id: str | None = None + keywords: List[str] = None + + @classmethod + def from_elinkapi_record(cls, R): + gotResponse = ELinkGetResponseModel( + osti_id = R.osti_id, + title = R.title, + persons = R.persons, + # assume default contributors for now, creators vs contributors? 
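+            # NOTE: identifiers of type 'RN' carry the MP material id; it is copied
+            # into mp_id below so get_site_url() can rebuild the materials page URL.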
+ publication_date = R.publication_date, + site_url = R.site_url, + doi = {"doi": R.doi}, + mp_id = next((id.value for id in R.identifiers if id.type == 'RN'), None), + keywords = R.keywords + ) + + return gotResponse + + def get_title(self): + formula = self.keywords[1] + return "Materials Data on %s by Materials Project" % formula + + def get_site_url(self): + return "https://materialsproject.org/materials/%s" % self.mp_id + + def get_keywords(self): + # keywords = "; ".join( + # ["crystal structure", material.pretty_formula, material.chemsys] + # ) + return self.keywords + + @classmethod + def get_default_description(cls): + return ( + "Computed materials data using density " + "functional theory calculations. These calculations determine " + "the electronic structure of bulk materials by solving " + "approximations to the Schrodinger equation. For more " + "information, see https://materialsproject.org/docs/calculations" + ) + + @classmethod + def custom_to_dict(cls, elink_record) -> dict: + if elink_record.osti_id is None or elink_record.osti_id == "": + return elink_record.dict(exclude={"osti_id", "doi"}) + else: + return elink_record.dict(exclude={"doi"}) + + +class ElinkResponseStatusEnum(Enum): + SUCCESS = "SUCCESS" + FAILED = "FAILURE" + + +class ELinkPostResponseModel(BaseModel): + osti_id: str + accession_num: str + product_nos: str + title: str + contract_nos: str + other_identifying_nos: Optional[str] + doi: Dict[str, str] + status: ElinkResponseStatusEnum + status_message: Optional[str] + + def generate_doi_record(self): + doi_collection_record = DOIRecordModel( + material_id=self.accession_num, + doi=self.doi["#text"], + status=self.doi["@status"], + bibtex=None, + valid=True, + last_validated_on=datetime.now(), + ) + doi_collection_record.set_status(status=self.doi["@status"]) + doi_collection_record.last_validated_on = datetime.now() + return doi_collection_record +>>>>>>> 5fa46e4 (Merged upstream (#1)) diff --git a/src/mp_cite/send_collection.py b/src/mp_cite/send_collection.py new file mode 100644 index 0000000..0ce65a3 --- /dev/null +++ b/src/mp_cite/send_collection.py @@ -0,0 +1,79 @@ +from pathlib import Path +from xml.dom.minidom import parseString +from dicttoxml import dicttoxml +from mpcite.doi_builder import DOIBuilder +import json +from monty.json import MontyDecoder +from pydantic import BaseModel, Field +from typing import List + +default_description = ( + "Computed materials data using density functional theory calculations. These " + "calculations determine the electronic structure of bulk materials by solving " + "approximations to the Schrodinger equation. 
For more information, " + "see https://materialsproject.org/docs/calculations" +) + + +class CollectionsModel(BaseModel): + title: str = Field(default="Sample Title") + product_type: str = Field(default="DC") + relidentifiersblock: List[List[str]] = Field() + contributors: List[dict] + description: str = Field(default=default_description) + site_url: str = Field(default="https://materialsproject.org/") + + +config_file = Path("/Users/michaelwu/Desktop/projects/MPCite/files/config_prod.json") + +bld: DOIBuilder = json.load(config_file.open("r"), cls=MontyDecoder) +bld.config_file_path = config_file.as_posix() + +records = [ + CollectionsModel( + relidentifiersblock=[["mp-1", "mp-2", "mp-1"]], + contributors=[ + { + "first_name": "Michael", + "last_name": "Wu", + "email": "wuxiaohua1011@berkeley.edu", + } + ], + ).dict(), + CollectionsModel( + relidentifiersblock=[["mp-21"], ["mp-22"]], + contributors=[ + { + "first_name": "Michael", + "last_name": "Wu", + "email": "wuxiaohua1011@berkeley.edu", + } + ], + ).dict(), +] + + +def my_item_func(x): + if x == "records": + return "record" + elif x == "contributors": + return "contributor" + elif x == "relidentifier_detail": + return "related_identifier" + elif x == "relidentifiersblock": + return "relidentifier_detail" + else: + return "item" + + +records_xml = parseString( + dicttoxml(records, custom_root="records", attr_type=False, item_func=my_item_func) +) + +for item in records_xml.getElementsByTagName("relidentifier_detail"): + item.setAttribute("type", "accession_num") + item.setAttribute("relationType", "Compiles") + +print(records_xml.toprettyxml()) +# response = bld.elink_adapter.post_collection(data=records_xml.toxml()) +# print(response) diff --git a/uv.lock b/uv.lock index b6a2a78..e6682a7 100644 --- a/uv.lock +++ b/uv.lock @@ -118,7 +118,7 @@ wheels = [ [[package]] name = "elinkapi" -version = "0.5.0" +version = "0.5.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, @@ -126,9 +126,9 @@ dependencies = [ { name = "requests-toolbelt" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/af/6b/1f146b90638a018eee47e77bc59345cb871b89592f5f33f74f94afb4f596/elinkapi-0.5.0.tar.gz", hash = "sha256:957a0430b0fd6112dcdbe22593c40f4dd9ce2543349fa173a8989d121b28a421", size = 51191, upload-time = "2025-07-30T14:40:41.032Z" } +sdist = { url = "https://files.pythonhosted.org/packages/77/11/aa584c66c16a417433a6ac51d232e4cf35a1b5c5a8a747193c73503c8b14/elinkapi-0.5.1.tar.gz", hash = "sha256:33e73648bcb5272e458215698219dcc1c09645f0726798883a2adcdc07f5e00e", size = 51606, upload-time = "2025-08-06T17:49:57.796Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/7e/bbd7b56119bc41535048474ffe20ab7378fd8eb2dcd2a701784fab73489f/elinkapi-0.5.0-py3-none-any.whl", hash = "sha256:d61e36bea06ca10a58c9272fe79991af63c78e3dba11837eefa71f21fe1e61f0", size = 37333, upload-time = "2025-07-30T14:40:39.843Z" }, + { url = "https://files.pythonhosted.org/packages/86/28/dec8dfc0a2ddd7ba16a90c29bb7c832f9323b5b2c6bb9699244601bdb289/elinkapi-0.5.1-py3-none-any.whl", hash = "sha256:0ab14ed05a5860480697dba860cb684b77cda042006212e597b5c5ec253df481", size = 37695, upload-time = "2025-08-06T17:49:56.434Z" }, ] [[package]] @@ -535,49 +535,49 @@ wheels = [ [[package]] name = "pymongo" -version = "4.13.2" +version = "4.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dnspython" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/4b/5a/d664298bf54762f0c89b8aa2c276868070e06afb853b4a8837de5741e5f9/pymongo-4.13.2.tar.gz", hash = "sha256:0f64c6469c2362962e6ce97258ae1391abba1566a953a492562d2924b44815c2", size = 2167844, upload-time = "2025-06-16T18:16:30.685Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/94/df/4c4ef17b48c70120f834ba7151860c300924915696c4a57170cb5b09787f/pymongo-4.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7af8c56d0a7fcaf966d5292e951f308fb1f8bac080257349e14742725fd7990d", size = 857145, upload-time = "2025-06-16T18:14:56.516Z" }, - { url = "https://files.pythonhosted.org/packages/e7/41/480ca82b3b3320fc70fe699a01df28db15a4ea154c8759ab4a437a74c808/pymongo-4.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad24f5864706f052b05069a6bc59ff875026e28709548131448fe1e40fc5d80f", size = 857437, upload-time = "2025-06-16T18:14:58.572Z" }, - { url = "https://files.pythonhosted.org/packages/50/d4/eb74e98ea980a5e1ec4f06f383ec6c52ab02076802de24268f477ef616d2/pymongo-4.13.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a10069454195d1d2dda98d681b1dbac9a425f4b0fe744aed5230c734021c1cb9", size = 1426516, upload-time = "2025-06-16T18:15:00.589Z" }, - { url = "https://files.pythonhosted.org/packages/aa/fe/c5960c0e6438bd489367261e5ef1a5db01e34349f0dbf7529fb938d3d2ef/pymongo-4.13.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e20862b81e3863bcd72334e3577a3107604553b614a8d25ee1bb2caaea4eb90", size = 1477477, upload-time = "2025-06-16T18:15:02.283Z" }, - { url = "https://files.pythonhosted.org/packages/f6/9f/ef4395175fc97876978736c8493d8ffa4d13aa7a4e12269a2cb0d52a1246/pymongo-4.13.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b4d5794ca408317c985d7acfb346a60f96f85a7c221d512ff0ecb3cce9d6110", size = 1451921, upload-time = "2025-06-16T18:15:04.35Z" }, - { url = "https://files.pythonhosted.org/packages/2a/b9/397cb2a3ec03f880e882102eddcb46c3d516c6cf47a05f44db48067924d9/pymongo-4.13.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8e0420fb4901006ae7893e76108c2a36a343b4f8922466d51c45e9e2ceb717", size = 1431045, upload-time = "2025-06-16T18:15:06.392Z" }, - { url = "https://files.pythonhosted.org/packages/f5/0d/e150a414e5cb07f2fefca817fa071a6da8d96308469a85a777244c8c4337/pymongo-4.13.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:239b5f83b83008471d54095e145d4c010f534af99e87cc8877fc6827736451a0", size = 1399697, upload-time = "2025-06-16T18:15:08.975Z" }, - { url = "https://files.pythonhosted.org/packages/b8/29/5190eafb994721c30a38a8a62df225c47a9da364ab5c8cffe90aabf6a54e/pymongo-4.13.2-cp311-cp311-win32.whl", hash = "sha256:6bceb524110c32319eb7119422e400dbcafc5b21bcc430d2049a894f69b604e5", size = 836261, upload-time = "2025-06-16T18:15:10.459Z" }, - { url = "https://files.pythonhosted.org/packages/d3/da/30bdcc83b23fc4f2996b39b41b2ff0ff2184230a78617c7b8636aac4d81d/pymongo-4.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:ab87484c97ae837b0a7bbdaa978fa932fbb6acada3f42c3b2bee99121a594715", size = 851451, upload-time = "2025-06-16T18:15:12.181Z" }, - { url = "https://files.pythonhosted.org/packages/03/e0/0e187750e23eed4227282fcf568fdb61f2b53bbcf8cbe3a71dde2a860d12/pymongo-4.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ec89516622dfc8b0fdff499612c0bd235aa45eeb176c9e311bcc0af44bf952b6", size = 912004, upload-time = "2025-06-16T18:15:14.299Z" }, - { url = 
"https://files.pythonhosted.org/packages/57/c2/9b79795382daaf41e5f7379bffdef1880d68160adea352b796d6948cb5be/pymongo-4.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f30eab4d4326df54fee54f31f93e532dc2918962f733ee8e115b33e6fe151d92", size = 911698, upload-time = "2025-06-16T18:15:16.334Z" }, - { url = "https://files.pythonhosted.org/packages/6f/e4/f04dc9ed5d1d9dbc539dc2d8758dd359c5373b0e06fcf25418b2c366737c/pymongo-4.13.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cce9428d12ba396ea245fc4c51f20228cead01119fcc959e1c80791ea45f820", size = 1690357, upload-time = "2025-06-16T18:15:18.358Z" }, - { url = "https://files.pythonhosted.org/packages/bb/de/41478a7d527d38f1b98b084f4a78bbb805439a6ebd8689fbbee0a3dfacba/pymongo-4.13.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac9241b727a69c39117c12ac1e52d817ea472260dadc66262c3fdca0bab0709b", size = 1754593, upload-time = "2025-06-16T18:15:20.096Z" }, - { url = "https://files.pythonhosted.org/packages/df/d9/8fa2eb110291e154f4312779b1a5b815090b8b05a59ecb4f4a32427db1df/pymongo-4.13.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3efc4c515b371a9fa1d198b6e03340985bfe1a55ae2d2b599a714934e7bc61ab", size = 1723637, upload-time = "2025-06-16T18:15:22.048Z" }, - { url = "https://files.pythonhosted.org/packages/27/7b/9863fa60a4a51ea09f5e3cd6ceb231af804e723671230f2daf3bd1b59c2b/pymongo-4.13.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f57a664aa74610eb7a52fa93f2cf794a1491f4f76098343485dd7da5b3bcff06", size = 1693613, upload-time = "2025-06-16T18:15:24.866Z" }, - { url = "https://files.pythonhosted.org/packages/9b/89/a42efa07820a59089836f409a63c96e7a74e33313e50dc39c554db99ac42/pymongo-4.13.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dcb0b8cdd499636017a53f63ef64cf9b6bd3fd9355796c5a1d228e4be4a4c94", size = 1652745, upload-time = "2025-06-16T18:15:27.078Z" }, - { url = "https://files.pythonhosted.org/packages/6a/cf/2c77d1acda61d281edd3e3f00d5017d3fac0c29042c769efd3b8018cb469/pymongo-4.13.2-cp312-cp312-win32.whl", hash = "sha256:bf43ae07804d7762b509f68e5ec73450bb8824e960b03b861143ce588b41f467", size = 883232, upload-time = "2025-06-16T18:15:29.169Z" }, - { url = "https://files.pythonhosted.org/packages/d2/4f/727f59156e3798850c3c2901f106804053cb0e057ed1bd9883f5fa5aa8fa/pymongo-4.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:812a473d584bcb02ab819d379cd5e752995026a2bb0d7713e78462b6650d3f3a", size = 903304, upload-time = "2025-06-16T18:15:31.346Z" }, - { url = "https://files.pythonhosted.org/packages/e0/95/b44b8e24b161afe7b244f6d43c09a7a1f93308cad04198de1c14c67b24ce/pymongo-4.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d6044ca0eb74d97f7d3415264de86a50a401b7b0b136d30705f022f9163c3124", size = 966232, upload-time = "2025-06-16T18:15:33.057Z" }, - { url = "https://files.pythonhosted.org/packages/6d/fc/d4d59799a52033acb187f7bd1f09bc75bebb9fd12cef4ba2964d235ad3f9/pymongo-4.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dd326bcb92d28d28a3e7ef0121602bad78691b6d4d1f44b018a4616122f1ba8b", size = 965935, upload-time = "2025-06-16T18:15:34.826Z" }, - { url = "https://files.pythonhosted.org/packages/07/a8/67502899d89b317ea9952e4769bc193ca15efee561b24b38a86c59edde6f/pymongo-4.13.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfb0c21bdd58e58625c9cd8de13e859630c29c9537944ec0a14574fdf88c2ac4", size = 1954070, upload-time = 
"2025-06-16T18:15:36.576Z" }, - { url = "https://files.pythonhosted.org/packages/da/3b/0dac5d81d1af1b96b3200da7ccc52fc261a35efb7d2ac493252eb40a2b11/pymongo-4.13.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9c7d345d57f17b1361008aea78a37e8c139631a46aeb185dd2749850883c7ba", size = 2031424, upload-time = "2025-06-16T18:15:38.723Z" }, - { url = "https://files.pythonhosted.org/packages/31/ed/7a5af49a153224ca7e31e9915703e612ad9c45808cc39540e9dd1a2a7537/pymongo-4.13.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8860445a8da1b1545406fab189dc20319aff5ce28e65442b2b4a8f4228a88478", size = 1995339, upload-time = "2025-06-16T18:15:40.474Z" }, - { url = "https://files.pythonhosted.org/packages/f1/e9/9c72eceae8439c4f1bdebc4e6b290bf035e3f050a80eeb74abb5e12ef8e2/pymongo-4.13.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01c184b612f67d5a4c8f864ae7c40b6cc33c0e9bb05e39d08666f8831d120504", size = 1956066, upload-time = "2025-06-16T18:15:42.272Z" }, - { url = "https://files.pythonhosted.org/packages/ac/79/9b019c47923395d5fced03856996465fb9340854b0f5a2ddf16d47e2437c/pymongo-4.13.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ea8c62d5f3c6529407c12471385d9a05f9fb890ce68d64976340c85cd661b", size = 1905642, upload-time = "2025-06-16T18:15:43.978Z" }, - { url = "https://files.pythonhosted.org/packages/93/2f/ebf56c7fa9298fa2f9716e7b66cf62b29e7fc6e11774f3b87f55d214d466/pymongo-4.13.2-cp313-cp313-win32.whl", hash = "sha256:d13556e91c4a8cb07393b8c8be81e66a11ebc8335a40fa4af02f4d8d3b40c8a1", size = 930184, upload-time = "2025-06-16T18:15:46.899Z" }, - { url = "https://files.pythonhosted.org/packages/76/2f/49c35464cbd5d116d950ff5d24b4b20491aaae115d35d40b945c33b29250/pymongo-4.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:cfc69d7bc4d4d5872fd1e6de25e6a16e2372c7d5556b75c3b8e2204dce73e3fb", size = 955111, upload-time = "2025-06-16T18:15:48.85Z" }, - { url = "https://files.pythonhosted.org/packages/57/56/b17c8b5329b1842b7847cf0fa224ef0a272bf2e5126360f4da8065c855a1/pymongo-4.13.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a457d2ac34c05e9e8a6bb724115b093300bf270f0655fb897df8d8604b2e3700", size = 1022735, upload-time = "2025-06-16T18:15:50.672Z" }, - { url = "https://files.pythonhosted.org/packages/83/e6/66fec65a7919bf5f35be02e131b4dc4bf3152b5e8d78cd04b6d266a44514/pymongo-4.13.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:02f131a6e61559613b1171b53fbe21fed64e71b0cb4858c47fc9bc7c8e0e501c", size = 1022740, upload-time = "2025-06-16T18:15:53.218Z" }, - { url = "https://files.pythonhosted.org/packages/17/92/cda7383df0d5e71dc007f172c1ecae6313d64ea05d82bbba06df7f6b3e49/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c942d1c6334e894271489080404b1a2e3b8bd5de399f2a0c14a77d966be5bc9", size = 2282430, upload-time = "2025-06-16T18:15:55.356Z" }, - { url = "https://files.pythonhosted.org/packages/84/da/285e05eb1d617b30dc7a7a98ebeb264353a8903e0e816a4eec6487c81f18/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:850168d115680ab66a0931a6aa9dd98ed6aa5e9c3b9a6c12128049b9a5721bc5", size = 2369470, upload-time = "2025-06-16T18:15:57.5Z" }, - { url = "https://files.pythonhosted.org/packages/89/c0/c0d5eae236de9ca293497dc58fc1e4872382223c28ec223f76afc701392c/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:af7dfff90647ee77c53410f7fe8ca4fe343f8b768f40d2d0f71a5602f7b5a541", size = 2328857, upload-time = "2025-06-16T18:15:59.59Z" }, - { url = "https://files.pythonhosted.org/packages/2b/5a/d8639fba60def128ce9848b99c56c54c8a4d0cd60342054cd576f0bfdf26/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8057f9bc9c94a8fd54ee4f5e5106e445a8f406aff2df74746f21c8791ee2403", size = 2280053, upload-time = "2025-06-16T18:16:02.166Z" }, - { url = "https://files.pythonhosted.org/packages/a1/69/d56f0897cc4932a336820c5d2470ffed50be04c624b07d1ad6ea75aaa975/pymongo-4.13.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51040e1ba78d6671f8c65b29e2864483451e789ce93b1536de9cc4456ede87fa", size = 2219378, upload-time = "2025-06-16T18:16:04.108Z" }, - { url = "https://files.pythonhosted.org/packages/04/1e/427e7f99801ee318b6331062d682d3816d7e1d6b6013077636bd75d49c87/pymongo-4.13.2-cp313-cp313t-win32.whl", hash = "sha256:7ab86b98a18c8689514a9f8d0ec7d9ad23a949369b31c9a06ce4a45dcbffcc5e", size = 979460, upload-time = "2025-06-16T18:16:06.128Z" }, - { url = "https://files.pythonhosted.org/packages/b5/9c/00301a6df26f0f8d5c5955192892241e803742e7c3da8c2c222efabc0df6/pymongo-4.13.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c38168263ed94a250fc5cf9c6d33adea8ab11c9178994da1c3481c2a49d235f8", size = 1011057, upload-time = "2025-06-16T18:16:07.917Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/bf/1c/f148bb1747c48955dbeea34a53c6d60b858f902c61c62330d277ee806af7/pymongo-4.14.0.tar.gz", hash = "sha256:15674e3fddce78cf134fc4e55f90abf1608a48430130cd35efdf3802fd47a1d1", size = 2213509, upload-time = "2025-08-06T13:41:11.04Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/31/e67483f313f70d7440e820246ec500bf039a9c905c6d374cdd970ed9241d/pymongo-4.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88f9c415c59450c0ac4133aa4745459101619ca7997dc468209bf395563667d2", size = 859161, upload-time = "2025-08-06T13:39:49.139Z" }, + { url = "https://files.pythonhosted.org/packages/b5/9b/afc662756e32922207b99ffc0f3d01cee5a495af0078a675a4dfc901ef75/pymongo-4.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6513474970fdf3afd9dc9de9065a31a1efc8288ca9068510e5e973fa80200c8f", size = 859458, upload-time = "2025-08-06T13:39:50.469Z" }, + { url = "https://files.pythonhosted.org/packages/24/9b/078cc8fe51836f4ec1bc2d49e0cfccfc3b914991213cc50c028d5b268a44/pymongo-4.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d60c0e9b13603317062f316906fb5be4000f5b5fe288eb6e9df4ef8695863cd8", size = 1428520, upload-time = "2025-08-06T13:39:52.045Z" }, + { url = "https://files.pythonhosted.org/packages/d2/0e/89864b0ab731927bffb60485238914d9462adbc93061d0c38dd60deb346f/pymongo-4.14.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a203cb757f75804c43aec48f23cb138e890a24219716ce9958041dace39ba470", size = 1479481, upload-time = "2025-08-06T13:39:54.913Z" }, + { url = "https://files.pythonhosted.org/packages/3b/8d/1b8ee7f66804d6ea88f7bfc2cf7fce1bd3b2598cb9e003a4406eb10d7405/pymongo-4.14.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bdf65bf8167a92b70de44d28ed9df1c2dec83fe2a82e26c01fc89da8ca6bc34", size = 1453925, upload-time = "2025-08-06T13:39:56.234Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/0e/97a8e082bbb60b39276f891f74aedb2f5ee97bbdee88690ef313f341690a/pymongo-4.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1f79fdd99db135dbc1678477793764c156c11623d0d9dbe4c57767d081b79b8", size = 1433050, upload-time = "2025-08-06T13:39:57.556Z" }, + { url = "https://files.pythonhosted.org/packages/b0/88/6627ecc431fa63008715dd0927204691d356c996de04d1477fa2317ea706/pymongo-4.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05152a2ca55805c37f77ac473e51915f44bba9a6b32fed875fa9df61d81681ca", size = 1401702, upload-time = "2025-08-06T13:39:59.265Z" }, + { url = "https://files.pythonhosted.org/packages/20/9a/1d253195763c865336536e5f328a86691db5ee642714ea1f12d51491223e/pymongo-4.14.0-cp311-cp311-win32.whl", hash = "sha256:aa25505e36e32bef3fa135578461f24735e9d4b7b62e6aa21eb8f2d163cef86d", size = 838006, upload-time = "2025-08-06T13:40:00.936Z" }, + { url = "https://files.pythonhosted.org/packages/21/0d/9e5243870e2ff2d2c4552d32e22fd4d3079466e3350b91bc3d68b99d19d5/pymongo-4.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:c57aef3b48e8c7818689604ff24e54524e164056ec56ee5ea48384264360bf59", size = 852617, upload-time = "2025-08-06T13:40:02.374Z" }, + { url = "https://files.pythonhosted.org/packages/ca/fd/71936f5188d76e1e5d86749c39fb15f7aaf7fdc0a81d62ca084cad9ed740/pymongo-4.14.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:864f005459ef9b19c60cb96ca755124e0291d211000859f33c62a166f55eba27", size = 914026, upload-time = "2025-08-06T13:40:06.024Z" }, + { url = "https://files.pythonhosted.org/packages/bd/7e/821ec87233b0cdc0cb5b2f9845d7ff52e94e5a37cc05c6d59a3d6c5f6f98/pymongo-4.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:69956f971a6f8dafc62e5c83ac21ebf15d5757e13758284f12218ad7fbd3c0fe", size = 913721, upload-time = "2025-08-06T13:40:07.459Z" }, + { url = "https://files.pythonhosted.org/packages/82/67/174f6b92efe4da967e8fcbaa25e59c6cb06efd395cacc9558c1254565031/pymongo-4.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91200f47453a1cb97136629e28f4091a109756ec37067b622f90c4b626b4af8d", size = 1692360, upload-time = "2025-08-06T13:40:08.857Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b5/36e84df138cd4a8280334ed5e6e7f5fa52a9cbe933cd68d9e10c9bca6639/pymongo-4.14.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c32f5e23e9dd31e20800d993f3695b6627664dc7da30ac1669f9716833b33175", size = 1756598, upload-time = "2025-08-06T13:40:10.61Z" }, + { url = "https://files.pythonhosted.org/packages/ad/7f/c6964ce567a4cc6248d7f0959af9b7d9e51837a3ca3d54b15ade6eecf376/pymongo-4.14.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4407c1ab7514e08d4af0f58cf9d7eddc86e45e458fe46f99a72a5d18dbc71dc", size = 1725641, upload-time = "2025-08-06T13:40:12.07Z" }, + { url = "https://files.pythonhosted.org/packages/7d/5b/827dca0b1b53118a96749a63886c1bbc04bf56b68424038baf7dabc98380/pymongo-4.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ec6666e599ee10cc5cde0cc6a8519373384a14af3a310ede1bf177105f38fb0", size = 1695618, upload-time = "2025-08-06T13:40:13.845Z" }, + { url = "https://files.pythonhosted.org/packages/9e/45/f53f6531836f9da26b753c60e0d0a0c6f22ac023ba8ef1286addf56ce86f/pymongo-4.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a809a982a843bb561c7030059d54ea7f1dcc967cc72a45f1435695e2a2a515a5", size = 1654748, upload-time = "2025-08-06T13:40:15.401Z" }, + { url = "https://files.pythonhosted.org/packages/83/3b/4b33d36c00a0c44889322b8e9a0650aa5668bc531f6301f01ad7a242d120/pymongo-4.14.0-cp312-cp312-win32.whl", hash = "sha256:3866d031fcbe81d7677c078026e650aeef8915560ba758a28051debce38f6b77", size = 884844, upload-time = "2025-08-06T13:40:17.21Z" }, + { url = "https://files.pythonhosted.org/packages/eb/f6/68b52e16fb831c246171379ae2115cc8cb282f6b7b47fbe7fb8cc4b9df1f/pymongo-4.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:91b8de871a40225bbe4f92d6dc3f20c26bf838e49d3563592131401af0d665a6", size = 904277, upload-time = "2025-08-06T13:40:18.722Z" }, + { url = "https://files.pythonhosted.org/packages/97/99/d6c145e57387bfa2b6d90f4f5285f2b0903625733dcc6403aa9d7abeddbb/pymongo-4.14.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54576faf8e6fefe17886a201f9df61bdf728ccac9a7a0847095a0e8480cd6ec1", size = 968259, upload-time = "2025-08-06T13:40:20.551Z" }, + { url = "https://files.pythonhosted.org/packages/8d/36/5226af83554bbfa0d754aa1ab022af92f64a2376604d56c9a8c50e247b85/pymongo-4.14.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c64ef5e58adedecb853a680eb5d1aea50770197f212e202d6eb50c801797b576", size = 967959, upload-time = "2025-08-06T13:40:21.962Z" }, + { url = "https://files.pythonhosted.org/packages/6d/f5/4e627e3e5230e8c62c5fe218b5cb1347a1b01932fd6446c3f03e18ec29c5/pymongo-4.14.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f1c0cdddf9783065bf55d3fe025843c0974a828bafc9bb5514ae28dd2828a40", size = 1956074, upload-time = "2025-08-06T13:40:23.413Z" }, + { url = "https://files.pythonhosted.org/packages/16/05/989cfdc8536245a55a549f76ba20356822ebfce752e72a5164bd0795ace0/pymongo-4.14.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22805d4fa587b526ac3129ee634a8761abbeb76883045718438f5b8e72f91ce6", size = 2033427, upload-time = "2025-08-06T13:40:25.325Z" }, + { url = "https://files.pythonhosted.org/packages/93/a5/fafef0230fa6cc5d4bb5addcea77d8b71f4eca4d31a5a596d26398aaa45a/pymongo-4.14.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c3e6e669cf36b27694de2730f5d5b31ef492ffe99446563192c4e8ee84ca859", size = 1997344, upload-time = "2025-08-06T13:40:26.91Z" }, + { url = "https://files.pythonhosted.org/packages/9b/6c/1170d5c8e087832dba2b0930ec90bafde8a59efa37b521d9a5902ec9d282/pymongo-4.14.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9763a2477d5388df65ab6f591cf0cb7fd34f4a66f873c31e63288fd79887742c", size = 1958072, upload-time = "2025-08-06T13:40:28.865Z" }, + { url = "https://files.pythonhosted.org/packages/27/7b/bb07c7c4c102046ff92f3acd05d85870e06a08df2a0fcd2e87586f2516fe/pymongo-4.14.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d6f5aceab3528b8760942a57635599925f7fd743691f9d759a02124207dfd0", size = 1907647, upload-time = "2025-08-06T13:40:30.435Z" }, + { url = "https://files.pythonhosted.org/packages/21/e6/c0ee2163133f1a437cd3b0308521e24181b69b5510b9eabde9ee86999c12/pymongo-4.14.0-cp313-cp313-win32.whl", hash = "sha256:e283feafde118cbbb03adc036b882be042b0a2eca121ec5d6bbec3e12980e8fa", size = 931673, upload-time = "2025-08-06T13:40:31.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/1d/1692f0696d8e6bcb3e43469999eeb92d5f0acdb9a50aca2c869820321df9/pymongo-4.14.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:a29b62d421a512833e99d4781b64e695cfe23b4c4a9159ea83e56fc2660f2480", size = 955893, upload-time = "2025-08-06T13:40:33.461Z" }, + { url = "https://files.pythonhosted.org/packages/fd/19/f3ed531d7151dc2d1be27746c206e74403283873ec5d12170616982eccb0/pymongo-4.14.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67225d5fb1be2a34c6f73cde9d81a8041a095a94ed2433e2cf9e2f1657443def", size = 1024757, upload-time = "2025-08-06T13:40:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/36/f9/38782d41e16d11ba540ddfc618104e249a5b950a446b8a77a17f3416c6e6/pymongo-4.14.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:bbf0dbe8554978c1271bdc82366852245a03ab124a1387b6f3531f64adac3c39", size = 1024765, upload-time = "2025-08-06T13:40:36.797Z" }, + { url = "https://files.pythonhosted.org/packages/82/75/b385aa6ed09d077b47b00d4bc3c4b9bdac97b50fde3be7599ff9ceef8cbc/pymongo-4.14.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7f61561cbc7426ffc2d37b46e65ab5323fc366644f70c8e2240ed5452e2c402", size = 2284433, upload-time = "2025-08-06T13:40:38.35Z" }, + { url = "https://files.pythonhosted.org/packages/f2/29/d0dbf281e58e26dbeef4e972905b371c63d33c5aa8caa0d58140224bfee5/pymongo-4.14.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:502cd551679753fb7838221c3bbb963da4b4aa0576848192afb1f78128ff729a", size = 2371473, upload-time = "2025-08-06T13:40:39.852Z" }, + { url = "https://files.pythonhosted.org/packages/db/eb/089dfc96a29881ed17964705ea254da2f8b3aebf9754abd6aaa8125e1589/pymongo-4.14.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba6b59afef2b47c4859bf36115fa577330601b93e39d04f39fcc6103e801286", size = 2330862, upload-time = "2025-08-06T13:40:41.803Z" }, + { url = "https://files.pythonhosted.org/packages/35/07/b4f1215314e5f1114a899c33f17219b1f590502c736058c50571fa189ed1/pymongo-4.14.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c0fa103f978c15f7c2f0d7b2e010c24c327432a0310503bc0ec93c5f9be9e81", size = 2282058, upload-time = "2025-08-06T13:40:43.467Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a1/87340e5a38003ef3591fdfc4b911fb32531b0dbbed8ab2431006858590fe/pymongo-4.14.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fe93676afe37794f01b8df5cf16528dce4d7d174cdf51ea1586c234eb5263c2", size = 2221380, upload-time = "2025-08-06T13:40:45.256Z" }, + { url = "https://files.pythonhosted.org/packages/0a/68/3970d18b0b58c4681598bc1e233f33c15ff0c388422b17ddd195e214f35d/pymongo-4.14.0-cp313-cp313t-win32.whl", hash = "sha256:1462fc2bb39527f01eea5378172b66c45d62e22fa4be957afe2ec747c4d2ff51", size = 980892, upload-time = "2025-08-06T13:40:46.908Z" }, + { url = "https://files.pythonhosted.org/packages/2d/fa/68b1555e62ed3ee87f8a2de99d5fb840cf045748da4488870b4dced44a95/pymongo-4.14.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e506af9b25aac77cc5c5ea4a72f81764e4f5ea90ca799aac43d665ab269f291d", size = 1011181, upload-time = "2025-08-06T13:40:48.641Z" }, ] [[package]] @@ -748,16 +748,16 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.33.0" +version = "20.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/db/2e/8a70dcbe8bf15213a08f9b0325ede04faca5d362922ae0d62ef0fa4b069d/virtualenv-20.33.0.tar.gz", hash = 
"sha256:47e0c0d2ef1801fce721708ccdf2a28b9403fa2307c3268aebd03225976f61d2", size = 6082069, upload-time = "2025-08-03T08:09:19.014Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/60/4f20960df6c7b363a18a55ab034c8f2bcd5d9770d1f94f9370ec104c1855/virtualenv-20.33.1.tar.gz", hash = "sha256:1b44478d9e261b3fb8baa5e74a0ca3bc0e05f21aa36167bf9cbf850e542765b8", size = 6082160, upload-time = "2025-08-05T16:10:55.605Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/87/b22cf40cdf7e2b2bf83f38a94d2c90c5ad6c304896e5a12d0c08a602eb59/virtualenv-20.33.0-py3-none-any.whl", hash = "sha256:106b6baa8ab1b526d5a9b71165c85c456fbd49b16976c88e2bc9352ee3bc5d3f", size = 6060205, upload-time = "2025-08-03T08:09:16.674Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ff/ded57ac5ff40a09e6e198550bab075d780941e0b0f83cbeabd087c59383a/virtualenv-20.33.1-py3-none-any.whl", hash = "sha256:07c19bc66c11acab6a5958b815cbcee30891cd1c2ccf53785a28651a0d8d8a67", size = 6060362, upload-time = "2025-08-05T16:10:52.81Z" }, ] [[package]] From 5c74861f6fded6563eb4cf95577127fc4555844e Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Mon, 14 Jul 2025 10:50:00 -0700 Subject: [PATCH 35/65] Added linting workflow, hopefully it works --- .github/workflows/lint.yml | 4 ---- pyproject.toml | 2 +- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c854112..1a76e83 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -5,7 +5,6 @@ on: branches: [master] pull_request: branches: [master] -<<<<<<< HEAD workflow_dispatch: jobs: @@ -22,6 +21,3 @@ jobs: - name: Analyzing the code with ruff run: uvx ruff check --output-format=github -======= -# TODO: setup linting with uv/ruff ->>>>>>> 52382ff (Merged upstream (#1)) diff --git a/pyproject.toml b/pyproject.toml index 52babb3..e93ed31 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ description = "Add your description here" readme = "README.md" authors = [ { name = "The Materials Project", email = "feedback@materialsproject.org" }, - { name = "Hugo Onghai", email = "" }, + { name = "Hugo Onghai", email = "hugoonghai@g.ucla.edu" }, { name = "Tyler Mathis", email = "35553152+tsmathis@users.noreply.github.com" }, ] maintainers = [ From 4c4989d01ceb9689c8512007b853fa8190121b8a Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Wed, 16 Jul 2025 10:37:37 -0700 Subject: [PATCH 36/65] New Branch for Linting Workflow --- .gitignore | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 8241d4c..d35bdfc 100644 --- a/.gitignore +++ b/.gitignore @@ -210,6 +210,15 @@ __marimo__/ # Streamlit .streamlit/secrets.toml +<<<<<<< HEAD json_pages/ notebooks/ -test_json_pages/ \ No newline at end of file +test_json_pages/ +======= +# json files for storing production records +*.json +.env +/json_pages +/notebooks +/test_json_pages +>>>>>>> b991f09 (New Branch for Linting Workflow) From 7d3fd12b7ac3bd6717b4b27d62da0395881f2b41 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 17 Jul 2025 15:31:57 -0700 Subject: [PATCH 37/65] Testing Linting workflow --- .github/workflows/lint.yml | 31 +++++++++++++++++++++++++++++++ src/mp_cite/core.py | 2 +- 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 1a76e83..522a6bc 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,6 +7,7 @@ on: branches: [master] workflow_dispatch: +<<<<<<< HEAD jobs: 
linting: runs-on: ubuntu-latest @@ -21,3 +22,33 @@ jobs: - name: Analyzing the code with ruff run: uvx ruff check --output-format=github +======= +# TODO: setup linting with uv/ruff +# informed by testing.yml and https://medium.com/@sidharthvijayakumar7/automating-pylint-in-github-workflow-80c84b2ff243 and ruff documentation +jobs: + linting: + strategy: + matrix: + os: ["ubuntu-latest"] + python-version: ["3.11", "3.12", "3.13"] + + name: mp-cite (${{ matrix.os }}/py${{ matrix.python-version }}) + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + python-version: ${{ matrix.python-version }} + version: "latest" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install ruff + # Update output format to enable automatic inline annotations + - name: Analyzing the code with ruff + run: ruff check --output-format=github +>>>>>>> 7c6a8e7 (Testing Linting workflow) diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index db0de7a..7b1bd60 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -154,4 +154,4 @@ def delete_osti_record(elinkapi: Elink, osti_id: OstiID, reason: str) -> bool: headers={"Authorization": f"Bearer {elinkapi.token}"}, ) Validation.handle_response(response) - return response.status_code == 204 # True if deleted successfully + return response.status_code == 204 # True if deleted successfully \ No newline at end of file From 1baf282b27dbb456da96b5d32450791641d8c3f8 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 17 Jul 2025 15:37:26 -0700 Subject: [PATCH 38/65] Allowing Lint.YML to run on push to linting_workflow --- .github/workflows/lint.yml | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 522a6bc..1a76e83 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,7 +7,6 @@ on: branches: [master] workflow_dispatch: -<<<<<<< HEAD jobs: linting: runs-on: ubuntu-latest @@ -22,33 +21,3 @@ jobs: - name: Analyzing the code with ruff run: uvx ruff check --output-format=github -======= -# TODO: setup linting with uv/ruff -# informed by testing.yml and https://medium.com/@sidharthvijayakumar7/automating-pylint-in-github-workflow-80c84b2ff243 and ruff documentation -jobs: - linting: - strategy: - matrix: - os: ["ubuntu-latest"] - python-version: ["3.11", "3.12", "3.13"] - - name: mp-cite (${{ matrix.os }}/py${{ matrix.python-version }}) - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - python-version: ${{ matrix.python-version }} - version: "latest" - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install ruff - # Update output format to enable automatic inline annotations - - name: Analyzing the code with ruff - run: ruff check --output-format=github ->>>>>>> 7c6a8e7 (Testing Linting workflow) From 31a973ad82811b0056d930929fe5f1821783f42c Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 17 Jul 2025 16:25:37 -0700 Subject: [PATCH 39/65] Testing pre-commit and updated lint.yml to disregard legacy files --- .gitignore | 4 ++-- pyproject.toml | 41 +++++++++++++++++++++++++++++++++++++++++ src/mp_cite/core.py | 6 +++++- 3 files changed, 48 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index d35bdfc..6b641e9 100644 --- a/.gitignore +++ b/.gitignore @@ -183,9 +183,9 @@ cython_debug/ .abstra/ # Visual Studio 
Code
-# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore 
+# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
 # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
-# and can be added to the global gitignore or merged into this file. However, if you prefer, 
+# and can be added to the global gitignore or merged into this file. However, if you prefer,
 # you could uncomment the following to ignore the entire vscode folder
 .vscode/
 
diff --git a/pyproject.toml b/pyproject.toml
index e93ed31..9c723eb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -50,15 +50,51 @@ Issues = "https://github.com/materialsproject/MPCite/issues"
 [tool.ruff]
 # Exclude a variety of commonly ignored directories.
 exclude = [
+<<<<<<< HEAD
 "legacy",
 "notebooks",
 "uv.lock"
+=======
+ ".bzr",
+ ".direnv",
+ ".eggs",
+ ".git",
+ ".git-rewrite",
+ ".hg",
+ ".ipynb_checkpoints",
+ ".mypy_cache",
+ ".nox",
+ ".pants.d",
+ ".pyenv",
+ ".pytest_cache",
+ ".pytype",
+ ".ruff_cache",
+ ".svn",
+ ".tox",
+ ".venv",
+ ".vscode",
+ "__pypackages__",
+ "_build",
+ "buck-out",
+ "build",
+ "dist",
+ "node_modules",
+ "site-packages",
+ "venv",
+ "legacy"
+>>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files)
 ]
 
 # Same as Black.
 line-length = 88
 indent-width = 4
 
+<<<<<<< HEAD
+=======
+# Assume Python 3.9
+target-version = "py39"
+
+>>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files)
 [tool.ruff.lint]
 # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
 # Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
@@ -70,8 +106,13 @@ ignore = []
 fixable = ["ALL"]
 unfixable = []
 
+<<<<<<< HEAD
 # Allow NO unused variables to exist in the codebase. If underscore-prefixed unused variables are permissible, use this regex "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
 dummy-variable-rgx = "^$"
+=======
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+>>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files)
 
 [tool.ruff.format]
 # Like Black, use double quotes for strings.
diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py
index 7b1bd60..2839869 100644
--- a/src/mp_cite/core.py
+++ b/src/mp_cite/core.py
@@ -154,4 +154,8 @@ def delete_osti_record(elinkapi: Elink, osti_id: OstiID, reason: str) -> bool:
         headers={"Authorization": f"Bearer {elinkapi.token}"},
     )
     Validation.handle_response(response)
-    return response.status_code == 204  # True if deleted successfully
\ No newline at end of file
+<<<<<<< HEAD
+    return response.status_code == 204  # True if deleted successfully
+=======
+    return response.status_code == 204  # True if deleted successfully
+>>>>>>> d7a7e39 (Testing pre-commit and updated lint.yml to disregard legacy files)

From f07523aa01a2bcea8f2125a2af1a55df0a74b26f Mon Sep 17 00:00:00 2001
From: HugoOnghai
Date: Thu, 17 Jul 2025 16:42:06 -0700
Subject: [PATCH 40/65] Using uv to install ruff dependency, using uv-cache and
 removing pip installation

---
 .github/workflows/lint.yml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 1a76e83..4fa79be 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -19,5 +19,12 @@ jobs:
           python-version: "3.12"
           version: "latest"

+<<<<<<< HEAD
+=======
+      - name: Install ruff
+        run: |
+          uv pip install ruff
+      # Update output format to enable automatic inline annotations
+>>>>>>> 27ac04f (Using uv to install ruff dependency, using uv-cache and removing pip installation)
       - name: Analyzing the code with ruff
         run: uvx ruff check --output-format=github

From cf03cba50fd42a93e16d2f6ae05bee14866c71b8 Mon Sep 17 00:00:00 2001
From: HugoOnghai
Date: Thu, 17 Jul 2025 16:49:26 -0700
Subject: [PATCH 41/65] added new action to install virtual environment before
 attempting to install dependency

---
 .github/workflows/lint.yml | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 4fa79be..1a76e83 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -19,12 +19,5 @@ jobs:
           python-version: "3.12"
           version: "latest"

-<<<<<<< HEAD
-=======
-      - name: Install ruff
-        run: |
-          uv pip install ruff
-      # Update output format to enable automatic inline annotations
->>>>>>> 27ac04f (Using uv to install ruff dependency, using uv-cache and removing pip installation)
       - name: Analyzing the code with ruff
         run: uvx ruff check --output-format=github

From d4e64869363652c431afdd2d416b713ad4b6420f Mon Sep 17 00:00:00 2001
From: HugoOnghai
Date: Thu, 17 Jul 2025 17:29:19 -0700
Subject: [PATCH 42/65] Attempting ruff check again now that it seems to
 work...

---
 pyproject.toml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index 9c723eb..12cf519 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -81,8 +81,14 @@ exclude = [
     "node_modules",
     "site-packages",
     "venv",
+<<<<<<< HEAD
     "legacy"
 >>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files)
+=======
+    "legacy",
+    "notebooks",
+    "uv.lock"
+>>>>>>> c4d9a8f (attempting ruff check again now that is seems to work...)
 ]

 # Same as Black.
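A note on the dummy-variable-rgx = "^$" setting that these pyproject.toml hunks carry along: it removes every exemption from ruff's unused-variable rule (F841), so even underscore-prefixed bindings are reported. A minimal sketch of code that would now fail lint; the function and variable names are hypothetical:

    def total_price(prices: list[float]) -> float:
        # F841: assigned but never used. With dummy-variable-rgx = "^$",
        # renaming this to _discount would not silence the warning either.
        discount = 0.1
        return sum(prices)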
From cdf81e0593a9fcfa4eb2287ede3dda8137680f49 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Mon, 21 Jul 2025 13:26:41 -0700 Subject: [PATCH 43/65] Removed assume python 3.9 from ruff config in pyproject.toml --- pyproject.toml | 47 ----------------------------------------------- 1 file changed, 47 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 12cf519..e93ed31 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,57 +50,15 @@ Issues = "https://github.com/materialsproject/MPCite/issues" [tool.ruff] # Exclude a variety of commonly ignored directories. exclude = [ -<<<<<<< HEAD "legacy", "notebooks", "uv.lock" -======= - ".bzr", - ".direnv", - ".eggs", - ".git", - ".git-rewrite", - ".hg", - ".ipynb_checkpoints", - ".mypy_cache", - ".nox", - ".pants.d", - ".pyenv", - ".pytest_cache", - ".pytype", - ".ruff_cache", - ".svn", - ".tox", - ".venv", - ".vscode", - "__pypackages__", - "_build", - "buck-out", - "build", - "dist", - "node_modules", - "site-packages", - "venv", -<<<<<<< HEAD - "legacy" ->>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files) -======= - "legacy", - "notebooks", - "uv.lock" ->>>>>>> c4d9a8f (attempting ruff check again now that is seems to work...) ] # Same as Black. line-length = 88 indent-width = 4 -<<<<<<< HEAD -======= -# Assume Python 3.9 -target-version = "py39" - ->>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files) [tool.ruff.lint] # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. # Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or @@ -112,13 +70,8 @@ ignore = [] fixable = ["ALL"] unfixable = [] -<<<<<<< HEAD # Allow NO unused variables to exist in the codebase. If underscore-prefixed unused variables are permissible, use this regex $^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" dummy-variable-rgx = "^$" -======= -# Allow unused variables when underscore-prefixed. -dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" ->>>>>>> d9ab19c (Testing pre-commit and updated lint.yml to disregard legacy files) [tool.ruff.format] # Like Black, use double quotes for strings. 
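Dropping the target-version = "py39" pin is more than cleanup: when the option is unset, ruff infers the target from requires-python in pyproject.toml (assuming that field is set for this project), and code earlier in this series already uses 3.10+ syntax. A small illustration; the function is hypothetical, but the str | None style mirrors the models.py annotations above:

    # PEP 604 union syntax needs Python 3.10+ when evaluated at runtime,
    # so a py39 assumption misdescribes this codebase (the CI matrices
    # above test 3.11-3.13).
    def format_mp_id(mp_id: str | None) -> str:
        return mp_id or "unknown"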
From 6c0af23caa2c4895ca52651c8cd3e4986eb4d722 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 10:30:01 -0700 Subject: [PATCH 44/65] Attempting first run of testing suite with github actions --- src/mp_cite/core.py | 9 + test_media_files/another_media_file.txt | 7 + test_media_files/best_media_file.txt | 3 + test_media_files/media_file.txt | 1 + tests/conf_test.py | 55 ++++++ tests/conftest.py | 11 -- tests/elink_service_test.py | 16 ++ tests/elinkapi_test.py | 234 ++++++++++++++++++++++++ 8 files changed, 325 insertions(+), 11 deletions(-) create mode 100644 test_media_files/another_media_file.txt create mode 100644 test_media_files/best_media_file.txt create mode 100644 test_media_files/media_file.txt create mode 100644 tests/conf_test.py delete mode 100644 tests/conftest.py create mode 100644 tests/elinkapi_test.py diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index 2839869..c1747ab 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -4,9 +4,14 @@ from elinkapi import Elink from elinkapi.record import RecordResponse from elinkapi.utils import Validation +<<<<<<< HEAD from pymongo import MongoClient from mp_cite.models import MinimumDARecord +======= +from mp_cite.models import MinimumDARecord +from typing import Literal, TypeAlias +>>>>>>> 5abbf4b (Attempting first run of testing suite with github actions) OstiID: TypeAlias = int @@ -154,8 +159,12 @@ def delete_osti_record(elinkapi: Elink, osti_id: OstiID, reason: str) -> bool: headers={"Authorization": f"Bearer {elinkapi.token}"}, ) Validation.handle_response(response) +<<<<<<< HEAD <<<<<<< HEAD return response.status_code == 204 # True if deleted successfully ======= return response.status_code == 204 # True if deleted successfully >>>>>>> d7a7e39 (Testing pre-commit and updated lint.yml to disregard legacy files) +======= + return response.status_code == 204 # True if deleted successfully +>>>>>>> 5abbf4b (Attempting first run of testing suite with github actions) diff --git a/test_media_files/another_media_file.txt b/test_media_files/another_media_file.txt new file mode 100644 index 0000000..9a64dd0 --- /dev/null +++ b/test_media_files/another_media_file.txt @@ -0,0 +1,7 @@ +WOWWWWWWWWWWWW + + + + + +O \ No newline at end of file diff --git a/test_media_files/best_media_file.txt b/test_media_files/best_media_file.txt new file mode 100644 index 0000000..c708781 --- /dev/null +++ b/test_media_files/best_media_file.txt @@ -0,0 +1,3 @@ +Not actually the best + +Sue Me \ No newline at end of file diff --git a/test_media_files/media_file.txt b/test_media_files/media_file.txt new file mode 100644 index 0000000..c9d49e9 --- /dev/null +++ b/test_media_files/media_file.txt @@ -0,0 +1 @@ +This is a media file. The text is here. that is all. \ No newline at end of file diff --git a/tests/conf_test.py b/tests/conf_test.py new file mode 100644 index 0000000..4a25e36 --- /dev/null +++ b/tests/conf_test.py @@ -0,0 +1,55 @@ +import os +import pytest +from elinkapi import Elink, exceptions +from dotenv import load_dotenv + +load_dotenv() + + +@pytest.fixture +def elink_review_client(): + """ + tests whether or not the elink review client can be properly retrieved. + returns the elink review client + """ + elink_review_api_key = os.getenv("elink_review_api_token") + review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") + return Elink(token=elink_review_api_key, target=review_endpoint) + + +@pytest.fixture +def elink_production_client(): + """ + tests whether or not the elink review client can be properly retrieved. 
+ returns the elink review client + """ + elink_prod_api_key = os.getenv("elink_api_PRODUCTION_key") + return Elink(token=elink_prod_api_key) + + +def test_get_single_record(elink_production_client): + try: + record = elink_production_client.get_single_record(1190959) + assert record.title == "Materials Data on Si by Materials Project" + assert record.osti_id == 1190959 + except exceptions.ForbiddenException as fe: + pytest.fail( + f"Forbidden: Check API key or permissions associated with provided API key. {fe}" + ) + except exceptions.BadRequestException as ve: + pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}") + except Exception as e: + pytest.fail(f"Unexpected error: {e}") + + +def test_query_records(elink_production_client): + try: + elink_production_client.query_records() + except exceptions.ForbiddenException as fe: + pytest.fail( + f"Forbidden: Check API key or permissions associated with provided API key. {fe}" + ) + except exceptions.BadRequestException as ve: + pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}") + except Exception as e: + pytest.fail(f"Unexpected error: {e}") diff --git a/tests/conftest.py b/tests/conftest.py deleted file mode 100644 index 0e4eaf1..0000000 --- a/tests/conftest.py +++ /dev/null @@ -1,11 +0,0 @@ -import os - -import pytest -from elinkapi import Elink - - -@pytest.fixture -def elink_review_client(): - review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") - elink_review_api_key = os.getenv("ELINK_REVIEW_API_TOKEN") - return Elink(token=elink_review_api_key, target=review_endpoint) diff --git a/tests/elink_service_test.py b/tests/elink_service_test.py index 0857d35..3e1c2b1 100644 --- a/tests/elink_service_test.py +++ b/tests/elink_service_test.py @@ -1,14 +1,30 @@ +from elinkapi import Elink from elinkapi.record import RecordResponse +import pytest +import os +from dotenv import load_dotenv + +load_dotenv() # TODO: Write tests that verify our usage of Elink is correct, # and make sure any upstream breaking changes get caught # here when version upgrades happen + # 1. general query logic + params that we use regularly? # 2. make sure we can submit a correctly templated dataset submission # 3. make sure record updates work # 4. deleting records? # 5+. test any other surfaces of the Elink api that we interact with +@pytest.fixture +def elink_review_client(): + """ + tests whether or not the elink review client can be properly retrieved. + returns the elink review client + """ + elink_review_api_key = os.getenv("elink_review_api_token") + review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") + return Elink(token=elink_review_api_key, target=review_endpoint) def test_elink_query(elink_review_client): diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py new file mode 100644 index 0000000..0eb6725 --- /dev/null +++ b/tests/elinkapi_test.py @@ -0,0 +1,234 @@ +import pytest +from elinkapi import Elink, Record, exceptions +import os +from src.mp_cite.core import make_minimum_record_to_fully_release +from dotenv import load_dotenv +from datetime import datetime + +load_dotenv() + + +valid_save_json = { + "title": "Electron microscope data for photons", + "site_ownership_code": "LLNL", + "product_type": "TR", + "description": "Hello, from teh other side", +} +valid_save_update_json = { + "title": "Electron microscope data for photons", + "site_ownership_code": "LLNL", + "product_type": "TR", + "description": "A NEW custom description. 
Search on 'Allo-ballo holla olah'.", +} +invalid_save_json = {"product_type": "TD", "site_ownership_code": "LLNL"} +valid_submit_json = { + "persons": [ + { + "type": "AUTHOR", + "first_name": "Required", + "middle_name": "Optional", + "last_name": "Required", + "email": ["optional@optional.org"], + "orcid": "0000000155554447", + "phone": "Optional", + "affiliations": [{"name": "Optional"}], + }, + { + "type": "RELEASE", + "first_name": "Required", + "middle_name": "Optional", + "last_name": "Required", + "email": ["required@required.org"], + "phone": "Optional", + }, + { + "type": "CONTRIBUTING", + "first_name": "Required", + "middle_name": "Optional", + "last_name": "Required", + "email": ["optional@optional.org"], + "phone": "Optional", + "contributor_type": "Producer", + "affiliations": [{"name": "Optional"}], + }, + ], + "organizations": [ + {"type": "AUTHOR", "name": "Required"}, + {"type": "CONTRIBUTING", "name": "Required", "contributor_type": "Producer"}, + { + "type": "SPONSOR", + "name": "Required", + "identifiers": [ + {"type": "CN_NONDOE", "value": "Required"}, + {"type": "CN_DOE", "value": "SC0001234"}, + {"type": "AWARD_DOI", "value": "Optional"}, + ], + }, + {"type": "RESEARCHING", "name": "Required"}, + ], + "identifiers": [ + {"type": "CN_DOE", "value": "SC0001234"}, + {"type": "CN_NONDOE", "value": "Required"}, + ], + "related_identifiers": [], + "access_limitations": ["UNL"], + "country_publication_code": "US", + "description": "Information about a particular record, report, or other document, or executive summary or abstract of same.", + "languages": ["English"], + "product_type": "TR", + "publication_date": "2018-02-21", + "publication_date_text": "Winter 2012", + "released_to_osti_date": "2023-03-03", + "site_ownership_code": "LBNL", + "title": "Sample document title", +} + + +@pytest.fixture +def elink_review_client(): + """ + tests whether or not the elink review client can be properly retrieved. + returns the elink review client + """ + elink_review_api_key = os.getenv("elink_review_api_token") + review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") + return Elink(token=elink_review_api_key, target=review_endpoint) + + +@pytest.fixture +def elink_production_client(): + """ + tests whether or not the elink review client can be properly retrieved. + returns the elink review client + """ + elink_prod_api_key = os.getenv("elink_api_PRODUCTION_key") + return Elink(token=elink_prod_api_key) + + +osti_id = "2300069" +# osti_id = 2300063 +media_id = "1900082" +reason = "I wanted to" +revision_number = "2" +date = datetime.now() +state = "save" +file_path = "./test_media_files/media_file.txt" +file_path2 = "./test_media_files/best_media_file.txt" +file_path3 = "./test_media_files/another_media_file.txt" +json_responses = [] +reserved_osti_id = 1 + + +# RECORD ENDPOINTS +# Post a new Record +@pytest.fixture +def test_post_new_record(elink_review_client): + record_to_post = make_minimum_record_to_fully_release( + title="Test Post Record - PyTest" + ) + # try: + # saved_record = elink_review_client.post_new_record(record_to_post, "save") # Works - saved + # except exceptions.ForbiddenException as fe: + # pytest.fail(f"Forbidden: Check API key or permissions associated with provided API key. {fe}") + # except exceptions.BadRequestException as ve: + # pytest.fail(f"Bad Request: Possibly incorrect parameters. 
{ve}") + # except Exception as e: + # pytest.fail(f"Unexpected error: {e}") + + try: + submitted_record = elink_review_client.post_new_record( + record_to_post, "submit" + ) # Works - submit + return submitted_record + except exceptions.ForbiddenException as fe: + pytest.fail( + f"Forbidden: Check API key or permissions associated with provided API key. {fe}" + ) + except exceptions.BadRequestException as ve: + pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}") + except Exception as e: + pytest.fail(f"Unexpected error: {e}") + + +def test_get_new_single_record(test_post_new_record): + # record_to_post = make_minimum_record_to_fully_release(title="Test Getting New Single Record - PyTest") + # submitted_record = elink_review_client.post_new_record(record_to_post, "submit") + + posted_record = test_post_new_record + + elink_review_api_key = os.getenv("elink_review_api_token") + review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") + elink_review_client = Elink(token=elink_review_api_key, target=review_endpoint) + + osti_id = test_post_new_record.osti_id + + single_record = elink_review_client.get_single_record(osti_id) + + assert osti_id is not None + assert single_record.title == posted_record.title + # assert single_record.organizations == record_to_post.organizations # this doesn't work because Elink's pydantic model defaults empty identifier to [], where as an empty identifier field is returned as None. + # assert single_record.persons == record_to_post.persons # same issue as above^ + assert single_record.publication_date == posted_record.publication_date + + +def test_invalid_query(elink_production_client): + list_of_records = elink_production_client.query_records( + title="Allo-ballo holla olah" + ) # works, nothing found + assert list_of_records.total_rows == 0 + + +# Reserve a DOI +def test_reserve_DOI(elink_review_client): + try: + elink_review_client.reserve_doi(Record(**valid_save_json)) # works - naved + except Exception: + print("failed to reserve doi on record") + + +def test_update_record(test_post_new_record): + posted_record = test_post_new_record + osti_id = posted_record.osti_id + + elink_review_api_key = os.getenv("elink_review_api_token") + review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") + elink_review_client = Elink(token=elink_review_api_key, target=review_endpoint) + + # Update an existing Record + elink_review_client.update_record( + osti_id, + make_minimum_record_to_fully_release("Test Updating Record - PyTest"), + "submit", + ) # works + + # Get Revision based on revision number + elink_review_client.get_revision_by_number(osti_id, revision_number) # works + # Get Revision based on date Currently Not Working...? 
+ # revision_by_date = elink_review_client.get_revision_by_date(osti_id, date.strftime("%Y-%d-%m")) # works + # Get all RevisionHistory of a Record + revision_history = elink_review_client.get_all_revisions(osti_id) # works + revision_history[0] + revision_history[-1] + + # # MEDIA ENDPOINTS + # # Associate new Media with a Record + # posted_media = elink_review_client.post_media(osti_id, file_path, {"title": "Title of the Media media_file.txt"}) + # posted_media3 = elink_review_client.post_media(osti_id, file_path3, {"title": "Title of the Media media_file.txt"}) + # media_id = posted_media.media_id + # # Replace existing Media on a Record + # replaced_media2 = elink_review_client.put_media(osti_id, media_id, file_path2, {"title": "Changed this title now"}) + # # Get Media associated with OSTI ID + # media = elink_review_client.get_media(osti_id) + # # Get Media content of a media resource + # media_content = elink_review_client.get_media_content(media_id) + # # Delete Media with media_id off of a Record + # isSuccessDelete = elink_review_client.delete_single_media(osti_id, media_id, reason) #works + # assert isSuccessDelete + # # Delete all Media associated with a Record + # isSuccessAllDelete = elink_review_client.delete_all_media(osti_id, reason) + # assert isSuccessAllDelete + + # # Should see that all media has been deleted + # final_media = elink_review_client.get_media(osti_id) + + # print("Finished") From fae3ecef745edcc7355f693195e1d368caf04e45 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 10:35:45 -0700 Subject: [PATCH 45/65] Forgot to allow for action to run on push to testing-suite --- .github/workflows/testing.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 9f71a9e..5af2906 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -2,9 +2,9 @@ name: testing on: push: - branches: [master] + branches: [master, testing-suite] pull_request: - branches: [master] + branches: [master, testing-suite] jobs: test: From c16c353a3b292b05fcfd834a5d05f7738e0afce6 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 10:52:48 -0700 Subject: [PATCH 46/65] Trying to fix environment variables... --- .github/workflows/testing.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 5af2906..c2a809d 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -33,7 +33,8 @@ jobs: - name: Run tests env: - ELINK_REVIEW_API_TOKEN: ${{ secrets.ELINK_REVIEW_API_TOKEN }} + elink_api_PRODUCTION_key: ${{ secrets.ELINK_PRODUCTION_API_TOKEN }} + elink_review_api_token: ${{ secrets.ELINK_REVIEW_API_TOKEN }} ELINK_REVIEW_ENDPOINT: ${{ secrets.ELINK_REVIEW_ENDPOINT }} run: uv run pytest tests # codecov? 
From c8e1c620e8fcddfa04fed98a936b4f278cb9dc22 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 11:02:55 -0700 Subject: [PATCH 47/65] Trying to resolve install dotenv issue --- .github/workflows/testing.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index c2a809d..ad6caa6 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -29,7 +29,10 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install the project - run: uv sync --locked --all-extras --dev + run: | + uv sync --locked --all-extras --dev + uv pip install python-dotenv + - name: Run tests env: From 8abb43e2f216a5c44e9883dce571efccac09d65e Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 11:34:26 -0700 Subject: [PATCH 48/65] Verifying which packages install during workflow --- .github/workflows/testing.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index ad6caa6..16b1cd0 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -29,10 +29,10 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install the project - run: | - uv sync --locked --all-extras --dev - uv pip install python-dotenv + run: uv sync --locked --all-extras --dev + - name: Verify installed packages + run: uv pip list - name: Run tests env: From 6a4c768ff35dc223d54c0b7f007072dd221d83af Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 11:41:46 -0700 Subject: [PATCH 49/65] Trying to update uv.lock --- .github/workflows/testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 16b1cd0..03adf8e 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -39,5 +39,5 @@ jobs: elink_api_PRODUCTION_key: ${{ secrets.ELINK_PRODUCTION_API_TOKEN }} elink_review_api_token: ${{ secrets.ELINK_REVIEW_API_TOKEN }} ELINK_REVIEW_ENDPOINT: ${{ secrets.ELINK_REVIEW_ENDPOINT }} - run: uv run pytest tests + run: uvx pytest tests # codecov? 
From 59eae1b797dbd3b4a7049ef7cef6efe8c46e9d76 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 11:56:25 -0700 Subject: [PATCH 50/65] Fixed/added python-dotenv in the uv.lock file --- pyproject.toml | 1 + uv.lock | 11 +++++++++++ 2 files changed, 12 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index e93ed31..d589bfb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,7 @@ dependencies = [ "elinkapi>=0.4.9", "pydantic>=2.11.7", "pymongo>=4.13.2", + "python-dotenv>=1.1.1", ] [dependency-groups] diff --git a/uv.lock b/uv.lock index e6682a7..287c431 100644 --- a/uv.lock +++ b/uv.lock @@ -303,6 +303,7 @@ dependencies = [ { name = "elinkapi" }, { name = "pydantic" }, { name = "pymongo" }, + { name = "python-dotenv" }, ] [package.dev-dependencies] @@ -323,6 +324,7 @@ requires-dist = [ { name = "elinkapi", specifier = ">=0.4.9" }, { name = "pydantic", specifier = ">=2.11.7" }, { name = "pymongo", specifier = ">=4.13.2" }, + { name = "python-dotenv", specifier = ">=1.1.1" }, ] [package.metadata.requires-dev] @@ -608,6 +610,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + [[package]] name = "pyyaml" version = "6.0.2" From 69fb545075ae20d2ff127d86c2bde151e1d9f86b Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 12:48:14 -0700 Subject: [PATCH 51/65] Returning to using uv run pytest tests --- .github/workflows/testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 03adf8e..16b1cd0 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -39,5 +39,5 @@ jobs: elink_api_PRODUCTION_key: ${{ secrets.ELINK_PRODUCTION_API_TOKEN }} elink_review_api_token: ${{ secrets.ELINK_REVIEW_API_TOKEN }} ELINK_REVIEW_ENDPOINT: ${{ secrets.ELINK_REVIEW_ENDPOINT }} - run: uvx pytest tests + run: uv run pytest tests # codecov? 
From 4cbba6960b838bfe750808067f144ba2743a5446 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 24 Jul 2025 12:50:24 -0700 Subject: [PATCH 52/65] Removed extraneous test file --- tests/test_elink_api.py | 109 ---------------------------------------- 1 file changed, 109 deletions(-) delete mode 100644 tests/test_elink_api.py diff --git a/tests/test_elink_api.py b/tests/test_elink_api.py deleted file mode 100644 index 80afba7..0000000 --- a/tests/test_elink_api.py +++ /dev/null @@ -1,109 +0,0 @@ -import os -from dotenv import load_dotenv - -from elinkapi import Elink, Record, exceptions -import pytest -from mpcite.models import ELinkGetResponseModel, TestClass - -from pymongo import MongoClient -import pymongo - -load_dotenv() - -atlas_user = os.environ.get("atlas_user") -atlas_password = os.environ.get("atlas_password") -atlas_host = os.environ.get("atlas_host") -mongo_uri = f"mongodb+srv://{atlas_user}:{atlas_password}@{atlas_host}/" - -api = Elink(token=os.environ.get("elink_api_PRODUCTION_key")) # target default is production E-link service. - -### Grabbing an existing record - -# record = api.get_single_record(mp-id) # test for silicon - -# type(record) - -# ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record) - -# print(ELinkGotRecordModel.get_title()) -# print(ELinkGotRecordModel.get_site_url()) -# print(ELinkGotRecordModel.get_keywords()) -# print(ELinkGotRecordModel.get_default_description()) - -# ELinkTestGetRecordModel = TestClass(**record.model_dump()) - -### Making a new record - -# with MongoClient(mongo_uri) as client: -# #get all material_ids and dois from doi collection -# doi_collection = client["mp_core"]["dois"] -# materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10)) -# material_ids = [entry["material_id"] for entry in materials_to_update] - -# # check # of material_ids from DOI collection vs amount in robocrys - -# # get description for material_ids from robocrys collection -# coll = client["mp_core_blue"]["robocrys"] -# res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) - -# # join on material_id -# for doc in res: -# mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update)) -# doc["doi"] = mat["doi"] - - -# {"material_id": ..., "doi": ..., "description": ...} -> -# Record( -# template_fields ..., -# doi: ..., -# description: ..., -# fields_where_material_id_makes_sense: ..., -# ) - -# with the client open -with MongoClient(mongo_uri) as client: - # get all dois from the collection - doi_collection = client["mp_core"]["dois"] - materials_to_update = list(doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2)) - - # from the doi collection, grab the material_id and doi of each material - material_ids = [entry["material_id"] for entry in materials_to_update] - - # additionally, gain the osti id from the doi - osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update] - - # additionally, grab the description of each material from the robocrys - coll = client["mp_core_blue"]["robocrys"] # grabs robocrys collection from active database - res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) # grabs the material id and description of entries in the collection - descriptions = [entry["description"] for entry in res] - - # for each material (and its material_id, doi, and osti_id) - for i in range(len(materials_to_update)): 
- internal_material_id = material_ids[i] - internal_osti_id = osti_ids[i] - internal_description = descriptions[i] - - # get_single_record(osti_id) - record = api.get_single_record(internal_osti_id) - - print(f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************") - print(record) - - if internal_material_id == record.site_unique_id: - # update description - record.description = "testTESTtestTESTtest" - - print(f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") - print(record) - - # # post updated record - # try: - # saved_record = api.post_new_record(record, "save") - # except exceptions.BadRequestException as ve: - # ... - # # ve.message = "Site Code AAAA is not valid." - # # ve.errors provides more details: - # # [{"status":"400", "detail":"Site Code AAAA is not valid.", "source":{"pointer":"site_ownership_code"}}] - - - From 43f2f87af7dae587b66099c1107c80e9c8ccc396 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Wed, 6 Aug 2025 13:36:38 -0700 Subject: [PATCH 53/65] Trying to fix the broken elinkapi_tests but they depend on the new DAMinimum model in new-core-to-rebase... --- __init__.py | 0 src/__init__.py | 0 tests/elinkapi_test.py | 9 ++++++++- 3 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 __init__.py create mode 100644 src/__init__.py diff --git a/__init__.py b/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py index 0eb6725..2f4eae9 100644 --- a/tests/elinkapi_test.py +++ b/tests/elinkapi_test.py @@ -1,10 +1,17 @@ import pytest from elinkapi import Elink, Record, exceptions + +import sys import os -from src.mp_cite.core import make_minimum_record_to_fully_release + from dotenv import load_dotenv from datetime import datetime +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) +from src.mp_cite.core import ( + make_minimum_record_to_fully_release, +) # cannot find a good workaround for this... + load_dotenv() From 6cf7bbac0b92e1397e54e552bff9df644d09fb24 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 7 Aug 2025 11:37:07 -0700 Subject: [PATCH 54/65] Finished rebase after new core PR merged, trying to clean up and get tests running again --- .github/workflows/release.yml | 8 --- .github/workflows/testing.yml | 15 +---- src/mp_cite/core.py | 14 +---- src/mp_cite/models.py | 112 ---------------------------------- 4 files changed, 4 insertions(+), 145 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6f3c42d..e5d2a28 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,7 +1,6 @@ name: release on: -<<<<<<< HEAD release: types: [published] @@ -71,10 +70,3 @@ jobs: - name: Build and Deploy! 
run: uvx mkdocs gh-deploy -======= - push: - branches: [master] - pull_request: - branches: [master] -# TODO: setup release to pypi ->>>>>>> 52382ff (Merged upstream (#1)) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 16b1cd0..c5bc018 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -2,9 +2,9 @@ name: testing on: push: - branches: [master, testing-suite] + branches: [master] pull_request: - branches: [master, testing-suite] + branches: [master] jobs: test: @@ -17,11 +17,6 @@ jobs: runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 -<<<<<<< HEAD -======= - with: - fetch-depth: 0 ->>>>>>> 52382ff (Merged upstream (#1)) - name: Install uv uses: astral-sh/setup-uv@v6 @@ -31,13 +26,9 @@ jobs: - name: Install the project run: uv sync --locked --all-extras --dev - - name: Verify installed packages - run: uv pip list - - name: Run tests env: elink_api_PRODUCTION_key: ${{ secrets.ELINK_PRODUCTION_API_TOKEN }} elink_review_api_token: ${{ secrets.ELINK_REVIEW_API_TOKEN }} ELINK_REVIEW_ENDPOINT: ${{ secrets.ELINK_REVIEW_ENDPOINT }} - run: uv run pytest tests -# codecov? + run: uv run pytest tests \ No newline at end of file diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index c1747ab..6d6790b 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -4,17 +4,13 @@ from elinkapi import Elink from elinkapi.record import RecordResponse from elinkapi.utils import Validation -<<<<<<< HEAD from pymongo import MongoClient from mp_cite.models import MinimumDARecord -======= -from mp_cite.models import MinimumDARecord -from typing import Literal, TypeAlias ->>>>>>> 5abbf4b (Attempting first run of testing suite with github actions) OstiID: TypeAlias = int + def find_out_of_date_doi_entries( rc_client: MongoClient, doi_client: MongoClient, @@ -159,12 +155,4 @@ def delete_osti_record(elinkapi: Elink, osti_id: OstiID, reason: str) -> bool: headers={"Authorization": f"Bearer {elinkapi.token}"}, ) Validation.handle_response(response) -<<<<<<< HEAD -<<<<<<< HEAD - return response.status_code == 204 # True if deleted successfully -======= - return response.status_code == 204 # True if deleted successfully ->>>>>>> d7a7e39 (Testing pre-commit and updated lint.yml to disregard legacy files) -======= return response.status_code == 204 # True if deleted successfully ->>>>>>> 5abbf4b (Attempting first run of testing suite with github actions) diff --git a/src/mp_cite/models.py b/src/mp_cite/models.py index 71b4c9a..8bbaf74 100644 --- a/src/mp_cite/models.py +++ b/src/mp_cite/models.py @@ -1,4 +1,3 @@ -<<<<<<< HEAD from pydantic import BaseModel, Field, model_validator from datetime import datetime @@ -83,114 +82,3 @@ class MinimumDARecord(Record): default_factory=lambda: datetime.now(tz=pytz.UTC) ) site_url: str = Field(default="https://next-gen.materialsproject.org/materials") -======= -from pydantic import BaseModel, Field, ConfigDict -from typing import List, Dict, Optional -import datetime -from enum import Enum -import bibtexparser -from elinkapi import Elink, Record -from elinkapi.record import RecordResponse, AccessLimitation, JournalType -from elinkapi.geolocation import Geolocation -from elinkapi.identifier import Identifier -from elinkapi.related_identifier import RelatedIdentifier -from elinkapi.person import Person -from elinkapi.organization import Organization - -class TestClass(RecordResponse): - ... - # stuff - -class ELinkGetResponseModel(BaseModel): - osti_id: Optional[int] = Field(...) 
- dataset_type: str = Field(default="SM") - title: str = Field(...) - persons: List[Person] - contributors: List[Dict[str, str]] = Field( - default=[{"first_name": "Materials", "last_name": "Project"}], - description="List of Dict of first name, last name mapping", - ) # no contributor - publication_date: datetime.date - site_url: str = Field(...) - doi: dict = Field( - {}, title="DOI info", description="Mainly used during GET request" - ) - mp_id: str | None = None - keywords: List[str] = None - - @classmethod - def from_elinkapi_record(cls, R): - gotResponse = ELinkGetResponseModel( - osti_id = R.osti_id, - title = R.title, - persons = R.persons, - # assume default contributors for now, creators vs contributors? - publication_date = R.publication_date, - site_url = R.site_url, - doi = {"doi": R.doi}, - mp_id = next((id.value for id in R.identifiers if id.type == 'RN'), None), - keywords = R.keywords - ) - - return gotResponse - - def get_title(self): - formula = self.keywords[1] - return "Materials Data on %s by Materials Project" % formula - - def get_site_url(self): - return "https://materialsproject.org/materials/%s" % self.mp_id - - def get_keywords(self): - # keywords = "; ".join( - # ["crystal structure", material.pretty_formula, material.chemsys] - # ) - return self.keywords - - @classmethod - def get_default_description(cls): - return ( - "Computed materials data using density " - "functional theory calculations. These calculations determine " - "the electronic structure of bulk materials by solving " - "approximations to the Schrodinger equation. For more " - "information, see https://materialsproject.org/docs/calculations" - ) - - @classmethod - def custom_to_dict(cls, elink_record) -> dict: - if elink_record.osti_id is None or elink_record.osti_id == "": - return elink_record.dict(exclude={"osti_id", "doi"}) - else: - return elink_record.dict(exclude={"doi"}) - - -class ElinkResponseStatusEnum(Enum): - SUCCESS = "SUCCESS" - FAILED = "FAILURE" - - -class ELinkPostResponseModel(BaseModel): - osti_id: str - accession_num: str - product_nos: str - title: str - contract_nos: str - other_identifying_nos: Optional[str] - doi: Dict[str, str] - status: ElinkResponseStatusEnum - status_message: Optional[str] - - def generate_doi_record(self): - doi_collection_record = DOIRecordModel( - material_id=self.accession_num, - doi=self.doi["#text"], - status=self.doi["@status"], - bibtex=None, - valid=True, - last_validated_on=datetime.now(), - ) - doi_collection_record.set_status(status=self.doi["@status"]) - doi_collection_record.last_validated_on = datetime.now() - return doi_collection_record ->>>>>>> 5fa46e4 (Merged upstream (#1)) From c725d0fffe2a8270abc7dd752e3c0e39ed1196ce Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 7 Aug 2025 11:44:37 -0700 Subject: [PATCH 55/65] Removed extra files in tests/ and updated email on pyproject.toml --- legacy/mpcite/recordresponse_example.txt | 0 pyproject.toml | 1 - src/mp_cite/core.py | 16 +++ src/mp_cite/send_collection.py | 79 --------------- tests/manage_backfills.py | 49 --------- tests/outputs.txt | 46 --------- tests/prod_to_review.py | 120 ----------------------- uv.lock | 11 --- 8 files changed, 16 insertions(+), 306 deletions(-) delete mode 100644 legacy/mpcite/recordresponse_example.txt delete mode 100644 src/mp_cite/send_collection.py delete mode 100644 tests/manage_backfills.py delete mode 100644 tests/outputs.txt delete mode 100644 tests/prod_to_review.py diff --git a/legacy/mpcite/recordresponse_example.txt 
b/legacy/mpcite/recordresponse_example.txt deleted file mode 100644 index e69de29..0000000 diff --git a/pyproject.toml b/pyproject.toml index d589bfb..e93ed31 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,7 +17,6 @@ dependencies = [ "elinkapi>=0.4.9", "pydantic>=2.11.7", "pymongo>=4.13.2", - "python-dotenv>=1.1.1", ] [dependency-groups] diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index 6d6790b..38be7c1 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -1,3 +1,4 @@ +<<<<<<< HEAD from typing import Literal, TypeAlias import requests @@ -7,6 +8,21 @@ from pymongo import MongoClient from mp_cite.models import MinimumDARecord +======= +from typing import TypeAlias + +from elinkapi import Elink +from elinkapi.record import RecordResponse +from pymongo import MongoClient + +import requests +from elinkapi.utils import Validation + + +from models import MinimumDARecord + +from typing import Literal +>>>>>>> 0346ff4 (Removed extra files in tests/ and updated email on pyproject.toml) OstiID: TypeAlias = int diff --git a/src/mp_cite/send_collection.py b/src/mp_cite/send_collection.py deleted file mode 100644 index 0ce65a3..0000000 --- a/src/mp_cite/send_collection.py +++ /dev/null @@ -1,79 +0,0 @@ -from pathlib import Path -from xml.dom.minidom import parseString -from dicttoxml import dicttoxml -from mpcite.doi_builder import DOIBuilder -import json -from monty.json import MontyDecoder -from pydantic import BaseModel, Field -from typing import List - -default_description = ( - "Computed materials data using density functional theory calculations. These " - "calculations determine the electronic structure of bulk materials by solving " - "approximations to the Schrodinger equation. For more information, " - "see https://materialsproject.org/docs/calculations" -) - - -class CollectionsModel(BaseModel): - title: str = Field(default="Sample Title") - product_type: str = Field(default="DC") - relidentifiersblock: List[List[str]] = Field() - contributors: List[dict] - description: str = Field(default=default_description) - site_url: str = Field(default="https://materialsproject.org/") - - -config_file = Path("/Users/michaelwu/Desktop/projects/MPCite/files/config_prod.json") - -bld: DOIBuilder = json.load(config_file.open("r"), cls=MontyDecoder) -bld.config_file_path = config_file.as_posix() - -records = [ - CollectionsModel( - relidentifiersblock=[["mp-1", "mp-2", "mp-1"]], - contributors=[ - { - "first_name": "Michael", - "last_name": "Wu", - "email": "wuxiaohua1011@berkeley.edu", - } - ], - ).dict(), - CollectionsModel( - relidentifiersblock=[["mp-21"], ["mp-22"]], - contributors=[ - { - "first_name": "Michael", - "last_name": "Wu", - "email": "wuxiaohua1011@berkeley.edu", - } - ], - ).dict(), -] - - -def my_item_func(x): - if x == "records": - return "record" - elif x == "contributors": - return "contributor" - elif x == "relidentifier_detail": - return "related_identifier" - elif x == "relidentifiersblock": - return "relidentifier_detail" - else: - return "item" - - -records_xml = parseString( - dicttoxml(records, custom_root="records", attr_type=False, item_func=my_item_func) -) - -for item in records_xml.getElementsByTagName("relidentifier_detail"): - item.setAttribute("type", "accession_num") - item.setAttribute("relationType", "Compiles") - -print(records_xml.toprettyxml()) -# response = bld.elink_adapter.post_collection(data=records_xml.toxml()) -# print(response) diff --git a/tests/manage_backfills.py b/tests/manage_backfills.py deleted file mode 100644 index 
a835456..0000000 --- a/tests/manage_backfills.py +++ /dev/null @@ -1,49 +0,0 @@ -# This script will see how many documents in ELink, i.e. ones with a DOI, are not accounted for in the internal DOI collection. - -from elinkapi import Elink, Query, Record - -import os -from dotenv import load_dotenv - -load_dotenv() # depends on the root directory from which you run your python scripts. - -api = Elink(token = os.environ.get("elink_api_PRODUCTION_key")) - - -query1 = api.query_records(rows=1000) - -materials_with_dois : list[Record] = [] - -for page in query1: - print(f"Now on Page: {page.title}") - print(f"Material_ID: {page.site_unique_id} and DOI: http://doi.org/{page.doi}") - - if page.site_unique_id.startswith("mp-"): - materials_with_dois.append(page) - - # for record in page.data: - # if record.site_unique_id.startswith("mp-"): - # materials_with_dois.append(record) - - - -# set_q1 = [page for page in query1] -# set_q2 = [page for page in query2] - -# set_diffq1q2 = set(set_q1) - set(set_q2) -# print (f"Difference matched {len(set)} records") - -# filtered = [ -# page for page in query1 -# if page.title.lower().startswith("materials data on") -# ] - -# print (f"Filtered Query1 has {len(filtered)} records") - -# paginate through ALL results -# for page in query1: -# print(page.title) -# print(f"Material_ID: {page.site_unique_id} and DOI: http://doi.org/{page.doi}") - -# for record in page.data: -# print (f"OSTI ID: {record.osti_id} Title: {record.title}") \ No newline at end of file diff --git a/tests/outputs.txt b/tests/outputs.txt deleted file mode 100644 index 8d188e7..0000000 --- a/tests/outputs.txt +++ /dev/null @@ -1,46 +0,0 @@ -(mpcite-env) C:\Users\ongha\OneDrive\Documents\GitHub\MPCite>C:/Users/ongha/anaconda3/envs/mpcite-env/python.exe c:/Users/ongha/OneDrive/Documents/GitHub/MPCite/tests/prod_to_review.py - -Query retrieved 144845 record(s) -Page finished. Now at 500 data entries. 0 edge cases found. -Page finished. Now at 1000 data entries. 0 edge cases found. -Page finished. Now at 1500 data entries. 0 edge cases found. -Page finished. Now at 2000 data entries. 0 edge cases found. -Page finished. Now at 2500 data entries. 0 edge cases found. -Page finished. Now at 3000 data entries. 0 edge cases found. -Page finished. Now at 3500 data entries. 0 edge cases found. -Page finished. Now at 4000 data entries. 0 edge cases found. -Page finished. Now at 4500 data entries. 0 edge cases found. -Page finished. Now at 5000 data entries. 0 edge cases found. -Page finished. Now at 5500 data entries. 0 edge cases found. -Page finished. Now at 6000 data entries. 0 edge cases found. -Page finished. Now at 6500 data entries. 0 edge cases found. -Page finished. Now at 7000 data entries. 0 edge cases found. -Page finished. Now at 7500 data entries. 0 edge cases found. -Page finished. Now at 8000 data entries. 0 edge cases found. -Page finished. Now at 8500 data entries. 0 edge cases found. -Page finished. Now at 9000 data entries. 0 edge cases found. -Page finished. Now at 9500 data entries. 0 edge cases found. -Page finished. Now at 10000 data entries. 0 edge cases found. -Page finished. Now at 10500 data entries. 0 edge cases found. -Page finished. Now at 11000 data entries. 0 edge cases found. -Page finished. Now at 11500 data entries. 0 edge cases found. -Page finished. Now at 12000 data entries. 0 edge cases found. -Page finished. Now at 12500 data entries. 0 edge cases found. -Page finished. Now at 13000 data entries. 0 edge cases found. -Page finished. Now at 13500 data entries. 
0 edge cases found. -Page finished. Now at 14000 data entries. 0 edge cases found. -Page finished. Now at 14500 data entries. 0 edge cases found. - -Traceback (most recent call last): - File "C:\Users\ongha\anaconda3\envs\mpcite-env\Lib\site-packages\elinkapi\query.py", line 95, in __next__ - record = self.data.pop() -IndexError: pop from empty list - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File "c:\Users\ongha\OneDrive\Documents\GitHub\MPCite\tests\prod_to_review.py", line 29, in - record = next(query) - File "C:\Users\ongha\anaconda3\envs\mpcite-env\Lib\site-packages\elinkapi\query.py", line 108, in __next__ - raise StopIteration -StopIteration \ No newline at end of file diff --git a/tests/prod_to_review.py b/tests/prod_to_review.py deleted file mode 100644 index 87e311d..0000000 --- a/tests/prod_to_review.py +++ /dev/null @@ -1,120 +0,0 @@ -from elinkapi import Elink, Query, Record - -import os -from dotenv import load_dotenv - -import json - -load_dotenv() # depends on the root directory from which you run your python scripts. - -review_endpoint = "https://review.osti.gov/elink2api/" - -prod_api = Elink(token = os.environ.get("elink_api_PRODUCTION_key")) -review_api = Elink(token = os.environ.get("elink_review_api_token"), target=review_endpoint) - -print(prod_api.query_records()) - -rows_per_page = 100 - -# query production -query = prod_api.query_records(rows=rows_per_page) -print(f"Query retrieved {query.total_rows} record(s)") - -count_materials_data = 0 -count_MaterialsDataOn = 0 -cwd = os.getcwd() -page_number = 0 -page_json_list = [] - -for record in query: - # increment counter - count_materials_data = count_materials_data + 1 - print(f"On record #{count_materials_data}, next url is {query.next_url}, previous url is {query.previous_url}") - - # see if the record is a Materials Data on record - if record.title.startswith("Materials Data on"): - # increment the MaterialsDataOn counter - count_MaterialsDataOn = count_MaterialsDataOn + 1 - - # prepare the new record for the review environment, remove the OSTI ID, and add its model_dump to the list of json objects for the page. - new_record = record - new_record_dict = new_record.model_dump(exclude_none=True) - - new_record_osti_id = new_record_dict.pop("osti_id") # now new_record_dict does not have the osti_id key. - js = json.dumps(new_record_dict, default=str) # datetime objects are not JSON serializable, so we use default=str to convert them to strings. - - page_json_list.append(js) - - # TODO: take the new_record_dict and make it into a new post to the review environment and save the RecordResponse. - - else: - print(f"Found edge case: {record.title}") - - if count_materials_data % rows_per_page == 0: - # create/open, write, and close new json file - page_number = count_materials_data / rows_per_page - path = f'/json_pages/page_number_{page_number}' - fp = open(cwd+path, 'a') - - for js in page_json_list: - fp.write(js) - fp.write("\n") - - fp.close() - page_json_list = [] - - print(f"Page {page_number} finished. Now at {count_materials_data} data entries. 
{count_materials_data - count_MaterialsDataOn} edge cases found.") - -# print remainder of records if not a full page after for loop exits -page_number = page_number + 1 -path = f'/json_pages/page_number_{page_number}' -fp = open(cwd+path, 'a') -for js in page_json_list: - fp.write(js) - fp.write("\n") -fp.close() - -# # if contains materials data on, then add to batch -# for count_materials_data < query.total_rows: - -# # print(f"The length of the query is now {len(query.data)}") -# record = next(query) -# count_materials_data = count_materials_data + 1 - -# if record.title.startswith("Materials Data on"): -# count_MaterialsDataOn = count_MaterialsDataOn + 1 - -# new_record = record -# new_record_dict = new_record.model_dump(exclude_none=True) - -# new_record_osti_id = new_record_dict.pop("osti_id") - -# page_dict[f"Entry OSTI_ID {new_record_osti_id}"] = new_record_dict - -# # TODO: take the new_record_dict and make it into a new post to the review environment and save the RecordResponse. - - - -# if count_materials_data % rows_per_page == 0: -# # if a page has been fully consummed, then print the new batched dictionary to a json file. - -# js = json.dumps(page_dict, default=str) - -# # open new json file if not exist it will create -# cwd = os.getcwd() -# path = f'/json_pages/page_number_{count_materials_data/rows_per_page}' -# fp = open(cwd+path, 'a') - -# # write to json file -# fp.write(js) - -# # close the connection to the file and empty the dict -# fp.close() -# page_dict = {} - -# print(f"Page {(count_materials_data / rows_per_page)} finished. Now at {count_materials_data} data entries. {count_materials_data - count_MaterialsDataOn} edge cases found.") - -# model_dump exclude_none=True, remove null keys -# pop osti_id --> save batch to json files -# make new record -# post to review_api diff --git a/uv.lock b/uv.lock index 287c431..e6682a7 100644 --- a/uv.lock +++ b/uv.lock @@ -303,7 +303,6 @@ dependencies = [ { name = "elinkapi" }, { name = "pydantic" }, { name = "pymongo" }, - { name = "python-dotenv" }, ] [package.dev-dependencies] @@ -324,7 +323,6 @@ requires-dist = [ { name = "elinkapi", specifier = ">=0.4.9" }, { name = "pydantic", specifier = ">=2.11.7" }, { name = "pymongo", specifier = ">=4.13.2" }, - { name = "python-dotenv", specifier = ">=1.1.1" }, ] [package.metadata.requires-dev] @@ -610,15 +608,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] -[[package]] -name = "python-dotenv" -version = "1.1.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, -] - [[package]] name = "pyyaml" version = "6.0.2" From b3c86904b4db05f05a5ba43eea91baea6f31f9c3 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Thu, 7 Aug 2025 
11:47:39 -0700 Subject: [PATCH 56/65] revert .gitignore and re-add lost legacy models file --- .gitignore | 15 +- legacy/mpcite/models.py | 319 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 322 insertions(+), 12 deletions(-) create mode 100644 legacy/mpcite/models.py diff --git a/.gitignore b/.gitignore index 6b641e9..8241d4c 100644 --- a/.gitignore +++ b/.gitignore @@ -183,9 +183,9 @@ cython_debug/ .abstra/ # Visual Studio Code -# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore +# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore -# and can be added to the global gitignore or merged into this file. However, if you prefer, +# and can be added to the global gitignore or merged into this file. However, if you prefer, # you could uncomment the following to ignore the entire vscode folder .vscode/ @@ -210,15 +210,6 @@ __marimo__/ # Streamlit .streamlit/secrets.toml -<<<<<<< HEAD json_pages/ notebooks/ -test_json_pages/ -======= -# json files for storing production records -*.json -.env -/json_pages -/notebooks -/test_json_pages ->>>>>>> b991f09 (New Branch for Linting Workflow) +test_json_pages/ \ No newline at end of file diff --git a/legacy/mpcite/models.py b/legacy/mpcite/models.py new file mode 100644 index 0000000..b2fab65 --- /dev/null +++ b/legacy/mpcite/models.py @@ -0,0 +1,319 @@ +from pydantic import BaseModel, Field +from typing import List, Dict, Optional +from datetime import datetime +from enum import Enum +import bibtexparser + + +class ConnectionModel(BaseModel): + endpoint: str = Field(..., title="URL Endpoint of the connection") + username: str = Field(..., title="User Name") + password: str = Field(..., title="Password") + + +class RoboCrysModel(BaseModel): + material_id: str + last_updated: datetime + description: Optional[str] = None + error: Optional[str] = None + + @classmethod + def get_default_description(cls): + return ( + "Computed materials data using density " + "functional theory calculations. These calculations determine " + "the electronic structure of bulk materials by solving " + "approximations to the Schrodinger equation. For more " + "information, see https://materialsproject.org/docs/calculations" + ) + + +class MaterialModel(BaseModel): + last_updated: datetime = Field( + None, title="timestamp for the most recent calculation" + ) + updated_at: datetime = Field(None, title="alternative to last_updated") + created_at: datetime = Field( + None, + description="creation time for this material defined by when the first structure " + "optimization calculation was run", + ) + task_id: str = Field( + "", title="task id for this material. Also called the material id" + ) + # pretty_formula: str = Field(..., title="clean representation of the formula") + pretty_formula: str = Field(..., title="clean representation of the formula") + chemsys: str + + +class ELinkGetResponseModel(BaseModel): + osti_id: Optional[str] = Field(...) + dataset_type: str = Field(default="SM") + title: str = Field(...) 
+ creators: str = Field(default="Kristin Persson") # replace with authors + contributors: List[Dict[str, str]] = Field( + default=[{"first_name": "Materials", "last_name": "Project"}], + description="List of Dict of first name, last name mapping", + ) # no contributor + product_nos: str = Field(..., title="MP id") + accession_num: str = Field(..., title="MP id") + contract_nos: str = Field("AC02-05CH11231; EDCBEE") + originating_research_org: str = Field( + default="Lawrence Berkeley National Laboratory (LBNL), Berkeley, CA (United States)" + ) + publication_date: str = Field(...) + language: str = Field(default="English") + country: str = Field(default="US") + sponsor_org: str = Field( + default="USDOE Office of Science (SC), Basic Energy Sciences (BES) (SC-22)" + ) + site_url: str = Field(...) + contact_name: str = Field(default="Kristin Persson") + contact_org: str = Field(default="LBNL") + contact_email: str = Field(default="feedback@materialsproject.org") + contact_phone: str = Field(default="+1(510)486-7218") + related_resource: str = Field("https://materialsproject.org/citing") + contributor_organizations: str = Field(default="MIT; UC Berkeley; Duke; U Louvain") + subject_categories_code: str = Field(default="36 MATERIALS SCIENCE") + keywords: str = Field(...) + description: str = Field(default="") + doi: dict = Field( + {}, title="DOI info", description="Mainly used during GET request" + ) + + @classmethod + def get_title(cls, material: MaterialModel): + formula = material.pretty_formula + return "Materials Data on %s by Materials Project" % formula + + @classmethod + def get_site_url(cls, mp_id): + return "https://materialsproject.org/materials/%s" % mp_id + + @classmethod + def get_keywords(cls, material): + keywords = "; ".join( + ["crystal structure", material.pretty_formula, material.chemsys] + ) + return keywords + + @classmethod + def get_default_description(cls): + return ( + "Computed materials data using density " + "functional theory calculations. These calculations determine " + "the electronic structure of bulk materials by solving " + "approximations to the Schrodinger equation. For more " + "information, see https://materialsproject.org/docs/calculations" + ) + + @classmethod + def custom_to_dict(cls, elink_record) -> dict: + if elink_record.osti_id is None or elink_record.osti_id == "": + return elink_record.dict(exclude={"osti_id", "doi"}) + else: + return elink_record.dict(exclude={"doi"}) + + +class ElinkResponseStatusEnum(Enum): + SUCCESS = "SUCCESS" + FAILED = "FAILURE" + + +class ELinkPostResponseModel(BaseModel): + osti_id: str + accession_num: str + product_nos: str + title: str + contract_nos: str + other_identifying_nos: Optional[str] + doi: Dict[str, str] + status: ElinkResponseStatusEnum + status_message: Optional[str] + + def generate_doi_record(self): + doi_collection_record = DOIRecordModel( + material_id=self.accession_num, + doi=self.doi["#text"], + status=self.doi["@status"], + bibtex=None, + valid=True, + last_validated_on=datetime.now(), + ) + doi_collection_record.set_status(status=self.doi["@status"]) + doi_collection_record.last_validated_on = datetime.now() + return doi_collection_record + + +class DOIRecordStatusEnum(str, Enum): + COMPLETED = "COMPLETED" + PENDING = "PENDING" + FAILURE = "FAILURE" + INIT = "INIT" + + +class DOIRecordModel(BaseModel): + material_id: str = Field(...) 
+ doi: str = Field(default="") + bibtex: Optional[str] = None + status: DOIRecordStatusEnum + valid: bool = Field(False) + last_updated: datetime = Field( + default=datetime.now(), + title="DOI last updated time.", + description="Last updated is defined as either a Bibtex or status change.", + ) + created_at: datetime = Field( + default=datetime.now(), + title="DOI Created At", + description="creation time for this DOI record", + ) + last_validated_on: datetime = Field( + default=datetime.now(), + title="Date Last Validated", + description="Date that this data is last validated, " "not necessarily updated", + ) + elsevier_updated_on: datetime = Field( + default=datetime.now(), + title="Date Elsevier is updated", + description="If None, means never uploaded to elsevier", + ) + error: Optional[str] = Field( + default=None, description="None if no error, else error message" + ) + + class Config: + use_enum_values = True + + def set_status(self, status): + self.status = status + + def get_osti_id(self): + if self.doi is None or self.doi == "": + return "" + else: + return self.doi.split("/")[-1] + + def get_bibtex_abstract(self): + try: + if self.bibtex is None: + return "" + bib_db: bibtexparser.bibdatabase.BibDatabase = bibtexparser.loads( + self.bibtex + ) + if bib_db.entries: + return bib_db.entries[0]["abstractnote"] + except Exception as e: + print(e) + return "" + + +class OSTIDOIRecordModel(DOIRecordModel): + material_id: str = Field(...) + doi: str = Field(default="") + bibtex: Optional[str] = None + valid: bool = Field(False) + last_updated: datetime = Field( + default=datetime.now(), + title="DOI last updated time.", + description="Last updated is defined as either a Bibtex or status change.", + ) + + +class ElsevierPOSTContainerModel(BaseModel): + identifier: str = Field(default="", title="mp_id") + source: str = "MATERIALS_PROJECT" + date: str = datetime.now().date().isoformat().__str__() + title: str + description: str = "" + doi: str + authors: List[str] = ["Kristin Persson"] + url: str + type: str = "dataset" + dateAvailable: str = datetime.now().date().isoformat().__str__() + dateCreated: str = datetime.now().date().isoformat().__str__() + version: str = "1.0.0" + funding: str = "USDOE Office of Science (SC), Basic Energy Sciences (BES) (SC-22)" + language: str = "en" + method: str = "Materials Project" + accessRights: str = "Public" + contact: str = "Kristin Persson " + dataStandard: str = "https://materialsproject.org/citing" + howToCite: str = "https://materialsproject.org/citing" + subjectAreas: List[str] = ["36 MATERIALS SCIENCE"] + keywords: List[str] + institutions: List[str] = ["Lawrence Berkeley National Laboratory"] + institutionIds: List[str] = ["AC02-05CH11231; EDCBEE"] + spatialCoverage: List[str] = [] + temporalCoverage: List[str] = [] + references: List[str] = ["https://materialsproject.org/citing"] + relatedResources: List[str] = ["https://materialsproject.org/citing"] + location: str = "1 Cyclotron Rd, Berkeley, CA 94720" + childContainerIds: List[str] = [] + + @classmethod + def get_url(cls, mp_id): + return "https://materialsproject.org/materials/%s" % mp_id + + @classmethod + def get_keywords(cls, material: MaterialModel): + return ["crystal structure", material.pretty_formula, material.chemsys] + + @classmethod + def get_default_description(cls): + return ( + "Computed materials data using density " + "functional theory calculations. 
These calculations determine "
+            "the electronic structure of bulk materials by solving "
+            "approximations to the Schrodinger equation. For more "
+            "information, see https://materialsproject.org/docs/calculations"
+        )
+
+    @classmethod
+    def get_date_created(cls, material: MaterialModel) -> str:
+        return material.created_at.date().__str__()
+
+    @classmethod
+    def get_date_available(cls, material: MaterialModel) -> str:
+        return material.created_at.date().__str__()
+
+    @classmethod
+    def get_title(cls, material: MaterialModel) -> str:
+        return material.pretty_formula
+
+    @classmethod
+    def from_material_model(cls, material: MaterialModel, doi: str, description: str):
+        model = ElsevierPOSTContainerModel(
+            identifier=material.task_id,
+            title=material.pretty_formula,
+            doi=doi,
+            url="https://materialsproject.org/materials/%s" % material.task_id,
+            keywords=["crystal structure", material.pretty_formula, material.chemsys],
+            date=datetime.now().date().__str__(),
+            dateCreated=material.created_at.date().__str__(),
+            dateAvailable=ElsevierPOSTContainerModel.get_date_available(material),
+            description=description,
+        )
+        return model
+
+
+class ExplorerGetJSONResponseModel(BaseModel):
+    osti_id: str
+    title: str
+    report_number: str
+    doi: str
+    product_type: str
+    language: str
+    country_publication: str
+    description: str
+    site_ownership_code: str
+    publication_date: str
+    entry_date: str
+    contributing_organizations: str
+    authors: List[str]
+    subjects: List[str]
+    contributing_org: str
+    doe_contract_number: str
+    sponsor_orgs: List[str]
+    research_orgs: List[str]
+    links: List[Dict[str, str]]

From 37acee3e1fe33354ca0483037a3557cdadcf16e6 Mon Sep 17 00:00:00 2001
From: HugoOnghai
Date: Thu, 7 Aug 2025 14:07:41 -0700
Subject: [PATCH 57/65] Trying to get the testing suite running again; there have been changes to ElinkAPI since the last testing debug...
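
A sketch of the intended call pattern after this change (hypothetical field
values; assumes mp_cite is importable as a package and that the
review-environment variables used by the tests are set):

    import os
    from elinkapi import Elink
    from mp_cite.core import update_existing_osti_record

    elinkapi = Elink(
        token=os.getenv("elink_review_api_token"),
        target=os.getenv("ELINK_REVIEW_ENDPOINT"),
    )
    # patch only the listed fields, leaving the record in the saved state
    update_existing_osti_record(
        elinkapi,
        osti_id=2300069,
        new_values={"description": "an updated description"},
        new_state="save",
    )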
--- .gitignore | 10 +++++--- pyproject.toml | 1 + src/mp_cite/core.py | 21 ++++++++++------- src/mp_cite/models.py | 7 +++--- tests/elinkapi_test.py | 39 ++++++++++++------------------- uv.lock | 53 +++++++++++++++++++++++++----------------- 6 files changed, 71 insertions(+), 60 deletions(-) diff --git a/.gitignore b/.gitignore index 8241d4c..c84ff87 100644 --- a/.gitignore +++ b/.gitignore @@ -210,6 +210,10 @@ __marimo__/ # Streamlit .streamlit/secrets.toml -json_pages/ -notebooks/ -test_json_pages/ \ No newline at end of file +# json files for storing production records +*.json +.env + +/json_pages +/notebooks +/test_json_pages diff --git a/pyproject.toml b/pyproject.toml index e93ed31..d589bfb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,7 @@ dependencies = [ "elinkapi>=0.4.9", "pydantic>=2.11.7", "pymongo>=4.13.2", + "python-dotenv>=1.1.1", ] [dependency-groups] diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index 38be7c1..df92ea7 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -19,7 +19,7 @@ from elinkapi.utils import Validation -from models import MinimumDARecord +from .models import MinimumDARecord from typing import Literal >>>>>>> 0346ff4 (Removed extra files in tests/ and updated email on pyproject.toml) @@ -81,7 +81,10 @@ def find_out_of_date_doi_entries( def update_existing_osti_record( - elinkapi: Elink, osti_id: OstiID, new_values: dict + elinkapi: Elink, + osti_id: OstiID, + new_values: dict, + new_state: Literal["save", "submit"], ) -> RecordResponse: """ update_existing_osti_record allows users to provide a dictionary of keywords and new values, which will replace the old values under the same keywords in the record with the given osti id @@ -104,14 +107,16 @@ def update_existing_osti_record( Instead, we leave this for the user. 
""" - record_on_elink = elinkapi.get_single_record(osti_id) + # record_on_elink = elinkapi.get_single_record(osti_id) + + # for keyword in new_values: + # setattr(record_on_elink, keyword, new_values[keyword]) - for keyword in new_values: - setattr(record_on_elink, keyword, new_values[keyword]) + # return elinkapi.update_record( + # osti_id, record_on_elink, state="save" + # ) # user should use update_state_of_osti_record to submit instead - return elinkapi.update_record( - osti_id, record_on_elink, state="save" - ) # user should use update_state_of_osti_record to submit instead + elinkapi.patch_record(osti_id, new_values, new_state) def submit_new_osti_record( diff --git a/src/mp_cite/models.py b/src/mp_cite/models.py index 8bbaf74..e707b76 100644 --- a/src/mp_cite/models.py +++ b/src/mp_cite/models.py @@ -1,10 +1,9 @@ from pydantic import BaseModel, Field, model_validator -from datetime import datetime +from datetime import datetime, timezone, date from elinkapi import Record, Organization, Person from typing import List, Any -import pytz class DOIModel(BaseModel): @@ -78,7 +77,7 @@ class MinimumDARecord(Record): ) site_ownership_code: str = Field(default="LBNL-MP") access_limitations: List[str] = Field(default_factory=lambda: ["UNL"]) - publication_date: datetime = Field( - default_factory=lambda: datetime.now(tz=pytz.UTC) + publication_date: date = Field( + default_factory=lambda: datetime.now(timezone.utc).date() ) site_url: str = Field(default="https://next-gen.materialsproject.org/materials") diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py index 2f4eae9..22e4c3b 100644 --- a/tests/elinkapi_test.py +++ b/tests/elinkapi_test.py @@ -1,5 +1,6 @@ import pytest from elinkapi import Elink, Record, exceptions +from elinkapi.record import RecordResponse import sys import os @@ -8,9 +9,9 @@ from datetime import datetime sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) -from src.mp_cite.core import ( - make_minimum_record_to_fully_release, -) # cannot find a good workaround for this... +from src.mp_cite.models import ( + MinimumDARecord, +) # cannot find a good workaround for this with relative importing... load_dotenv() @@ -129,18 +130,8 @@ def elink_production_client(): # RECORD ENDPOINTS # Post a new Record @pytest.fixture -def test_post_new_record(elink_review_client): - record_to_post = make_minimum_record_to_fully_release( - title="Test Post Record - PyTest" - ) - # try: - # saved_record = elink_review_client.post_new_record(record_to_post, "save") # Works - saved - # except exceptions.ForbiddenException as fe: - # pytest.fail(f"Forbidden: Check API key or permissions associated with provided API key. {fe}") - # except exceptions.BadRequestException as ve: - # pytest.fail(f"Bad Request: Possibly incorrect parameters. 
{ve}") - # except Exception as e: - # pytest.fail(f"Unexpected error: {e}") +def test_post_new_record(elink_review_client) -> RecordResponse: + record_to_post = MinimumDARecord(title="Test Post Record - PyTest") try: submitted_record = elink_review_client.post_new_record( @@ -158,9 +149,6 @@ def test_post_new_record(elink_review_client): def test_get_new_single_record(test_post_new_record): - # record_to_post = make_minimum_record_to_fully_release(title="Test Getting New Single Record - PyTest") - # submitted_record = elink_review_client.post_new_record(record_to_post, "submit") - posted_record = test_post_new_record elink_review_api_key = os.getenv("elink_review_api_token") @@ -204,18 +192,21 @@ def test_update_record(test_post_new_record): # Update an existing Record elink_review_client.update_record( osti_id, - make_minimum_record_to_fully_release("Test Updating Record - PyTest"), + MinimumDARecord(title="Test Updating Record - PyTest"), "submit", ) # works # Get Revision based on revision number elink_review_client.get_revision_by_number(osti_id, revision_number) # works - # Get Revision based on date Currently Not Working...? - # revision_by_date = elink_review_client.get_revision_by_date(osti_id, date.strftime("%Y-%d-%m")) # works + + # as of 8/7/2025, elinkapi 0.5.1, these get_all_revisions() calls have stopped working)... + # elink_prod_client = Elink(token=os.getenv("elink_api_PRODUCTION_key")) + # print(elink_prod_client.get_all_revisions(1758063)) + # Get all RevisionHistory of a Record - revision_history = elink_review_client.get_all_revisions(osti_id) # works - revision_history[0] - revision_history[-1] + # revision_history = elink_review_client.get_all_revisions(osti_id) # works + # revision_history[0] + # revision_history[-1] # # MEDIA ENDPOINTS # # Associate new Media with a Record diff --git a/uv.lock b/uv.lock index e6682a7..970822d 100644 --- a/uv.lock +++ b/uv.lock @@ -303,6 +303,7 @@ dependencies = [ { name = "elinkapi" }, { name = "pydantic" }, { name = "pymongo" }, + { name = "python-dotenv" }, ] [package.dev-dependencies] @@ -323,6 +324,7 @@ requires-dist = [ { name = "elinkapi", specifier = ">=0.4.9" }, { name = "pydantic", specifier = ">=2.11.7" }, { name = "pymongo", specifier = ">=4.13.2" }, + { name = "python-dotenv", specifier = ">=1.1.1" }, ] [package.metadata.requires-dev] @@ -608,6 +610,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + [[package]] name = "pyyaml" version = "6.0.2" @@ -684,27 +695,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.7" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/81/0bd3594fa0f690466e41bd033bdcdf86cba8288345ac77ad4afbe5ec743a/ruff-0.12.7.tar.gz", hash = "sha256:1fc3193f238bc2d7968772c82831a4ff69252f673be371fb49663f0068b7ec71", size = 5197814, upload-time = "2025-07-29T22:32:35.877Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/d2/6cb35e9c85e7a91e8d22ab32ae07ac39cc34a71f1009a6f9e4a2a019e602/ruff-0.12.7-py3-none-linux_armv6l.whl", hash = "sha256:76e4f31529899b8c434c3c1dede98c4483b89590e15fb49f2d46183801565303", size = 11852189, upload-time = "2025-07-29T22:31:41.281Z" }, - { url = "https://files.pythonhosted.org/packages/63/5b/a4136b9921aa84638f1a6be7fb086f8cad0fde538ba76bda3682f2599a2f/ruff-0.12.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:789b7a03e72507c54fb3ba6209e4bb36517b90f1a3569ea17084e3fd295500fb", size = 12519389, upload-time = "2025-07-29T22:31:54.265Z" }, - { url = "https://files.pythonhosted.org/packages/a8/c9/3e24a8472484269b6b1821794141f879c54645a111ded4b6f58f9ab0705f/ruff-0.12.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e1c2a3b8626339bb6369116e7030a4cf194ea48f49b64bb505732a7fce4f4e3", size = 11743384, upload-time = "2025-07-29T22:31:59.575Z" }, - { url = "https://files.pythonhosted.org/packages/26/7c/458dd25deeb3452c43eaee853c0b17a1e84169f8021a26d500ead77964fd/ruff-0.12.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32dec41817623d388e645612ec70d5757a6d9c035f3744a52c7b195a57e03860", size = 11943759, upload-time = "2025-07-29T22:32:01.95Z" }, - { url = "https://files.pythonhosted.org/packages/7f/8b/658798472ef260ca050e400ab96ef7e85c366c39cf3dfbef4d0a46a528b6/ruff-0.12.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47ef751f722053a5df5fa48d412dbb54d41ab9b17875c6840a58ec63ff0c247c", size = 11654028, upload-time = "2025-07-29T22:32:04.367Z" }, - { url = "https://files.pythonhosted.org/packages/a8/86/9c2336f13b2a3326d06d39178fd3448dcc7025f82514d1b15816fe42bfe8/ruff-0.12.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a828a5fc25a3efd3e1ff7b241fd392686c9386f20e5ac90aa9234a5faa12c423", size = 13225209, upload-time = "2025-07-29T22:32:06.952Z" }, - { url = "https://files.pythonhosted.org/packages/76/69/df73f65f53d6c463b19b6b312fd2391dc36425d926ec237a7ed028a90fc1/ruff-0.12.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5726f59b171111fa6a69d82aef48f00b56598b03a22f0f4170664ff4d8298efb", size = 14182353, upload-time = "2025-07-29T22:32:10.053Z" }, - { url = "https://files.pythonhosted.org/packages/58/1e/de6cda406d99fea84b66811c189b5ea139814b98125b052424b55d28a41c/ruff-0.12.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74e6f5c04c4dd4aba223f4fe6e7104f79e0eebf7d307e4f9b18c18362124bccd", size = 13631555, upload-time = "2025-07-29T22:32:12.644Z" }, - { url = "https://files.pythonhosted.org/packages/6f/ae/625d46d5164a6cc9261945a5e89df24457dc8262539ace3ac36c40f0b51e/ruff-0.12.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0bfe4e77fba61bf2ccadf8cf005d6133e3ce08793bbe870dd1c734f2699a3e", size = 12667556, upload-time = "2025-07-29T22:32:15.312Z" }, - { url = "https://files.pythonhosted.org/packages/55/bf/9cb1ea5e3066779e42ade8d0cd3d3b0582a5720a814ae1586f85014656b6/ruff-0.12.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06bfb01e1623bf7f59ea749a841da56f8f653d641bfd046edee32ede7ff6c606", size = 12939784, upload-time = "2025-07-29T22:32:17.69Z" }, - 
{ url = "https://files.pythonhosted.org/packages/55/7f/7ead2663be5627c04be83754c4f3096603bf5e99ed856c7cd29618c691bd/ruff-0.12.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41df94a957d50083fd09b916d6e89e497246698c3f3d5c681c8b3e7b9bb4ac8", size = 11771356, upload-time = "2025-07-29T22:32:20.134Z" }, - { url = "https://files.pythonhosted.org/packages/17/40/a95352ea16edf78cd3a938085dccc55df692a4d8ba1b3af7accbe2c806b0/ruff-0.12.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4000623300563c709458d0ce170c3d0d788c23a058912f28bbadc6f905d67afa", size = 11612124, upload-time = "2025-07-29T22:32:22.645Z" }, - { url = "https://files.pythonhosted.org/packages/4d/74/633b04871c669e23b8917877e812376827c06df866e1677f15abfadc95cb/ruff-0.12.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:69ffe0e5f9b2cf2b8e289a3f8945b402a1b19eff24ec389f45f23c42a3dd6fb5", size = 12479945, upload-time = "2025-07-29T22:32:24.765Z" }, - { url = "https://files.pythonhosted.org/packages/be/34/c3ef2d7799c9778b835a76189c6f53c179d3bdebc8c65288c29032e03613/ruff-0.12.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a07a5c8ffa2611a52732bdc67bf88e243abd84fe2d7f6daef3826b59abbfeda4", size = 12998677, upload-time = "2025-07-29T22:32:27.022Z" }, - { url = "https://files.pythonhosted.org/packages/77/ab/aca2e756ad7b09b3d662a41773f3edcbd262872a4fc81f920dc1ffa44541/ruff-0.12.7-py3-none-win32.whl", hash = "sha256:c928f1b2ec59fb77dfdf70e0419408898b63998789cc98197e15f560b9e77f77", size = 11756687, upload-time = "2025-07-29T22:32:29.381Z" }, - { url = "https://files.pythonhosted.org/packages/b4/71/26d45a5042bc71db22ddd8252ca9d01e9ca454f230e2996bb04f16d72799/ruff-0.12.7-py3-none-win_amd64.whl", hash = "sha256:9c18f3d707ee9edf89da76131956aba1270c6348bfee8f6c647de841eac7194f", size = 12912365, upload-time = "2025-07-29T22:32:31.517Z" }, - { url = "https://files.pythonhosted.org/packages/4c/9b/0b8aa09817b63e78d94b4977f18b1fcaead3165a5ee49251c5d5c245bb2d/ruff-0.12.7-py3-none-win_arm64.whl", hash = "sha256:dfce05101dbd11833a0776716d5d1578641b7fddb537fe7fa956ab85d1769b69", size = 11982083, upload-time = "2025-07-29T22:32:33.881Z" }, +version = "0.12.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4b/da/5bd7565be729e86e1442dad2c9a364ceeff82227c2dece7c29697a9795eb/ruff-0.12.8.tar.gz", hash = "sha256:4cb3a45525176e1009b2b64126acf5f9444ea59066262791febf55e40493a033", size = 5242373, upload-time = "2025-08-07T19:05:47.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/1e/c843bfa8ad1114fab3eb2b78235dda76acd66384c663a4e0415ecc13aa1e/ruff-0.12.8-py3-none-linux_armv6l.whl", hash = "sha256:63cb5a5e933fc913e5823a0dfdc3c99add73f52d139d6cd5cc8639d0e0465513", size = 11675315, upload-time = "2025-08-07T19:05:06.15Z" }, + { url = "https://files.pythonhosted.org/packages/24/ee/af6e5c2a8ca3a81676d5480a1025494fd104b8896266502bb4de2a0e8388/ruff-0.12.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9a9bbe28f9f551accf84a24c366c1aa8774d6748438b47174f8e8565ab9dedbc", size = 12456653, upload-time = "2025-08-07T19:05:09.759Z" }, + { url = "https://files.pythonhosted.org/packages/99/9d/e91f84dfe3866fa648c10512904991ecc326fd0b66578b324ee6ecb8f725/ruff-0.12.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2fae54e752a3150f7ee0e09bce2e133caf10ce9d971510a9b925392dc98d2fec", size = 11659690, upload-time = "2025-08-07T19:05:12.551Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/ac/a363d25ec53040408ebdd4efcee929d48547665858ede0505d1d8041b2e5/ruff-0.12.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0acbcf01206df963d9331b5838fb31f3b44fa979ee7fa368b9b9057d89f4a53", size = 11896923, upload-time = "2025-08-07T19:05:14.821Z" }, + { url = "https://files.pythonhosted.org/packages/58/9f/ea356cd87c395f6ade9bb81365bd909ff60860975ca1bc39f0e59de3da37/ruff-0.12.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae3e7504666ad4c62f9ac8eedb52a93f9ebdeb34742b8b71cd3cccd24912719f", size = 11477612, upload-time = "2025-08-07T19:05:16.712Z" }, + { url = "https://files.pythonhosted.org/packages/1a/46/92e8fa3c9dcfd49175225c09053916cb97bb7204f9f899c2f2baca69e450/ruff-0.12.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb82efb5d35d07497813a1c5647867390a7d83304562607f3579602fa3d7d46f", size = 13182745, upload-time = "2025-08-07T19:05:18.709Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c4/f2176a310f26e6160deaf661ef60db6c3bb62b7a35e57ae28f27a09a7d63/ruff-0.12.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dbea798fc0065ad0b84a2947b0aff4233f0cb30f226f00a2c5850ca4393de609", size = 14206885, upload-time = "2025-08-07T19:05:21.025Z" }, + { url = "https://files.pythonhosted.org/packages/87/9d/98e162f3eeeb6689acbedbae5050b4b3220754554526c50c292b611d3a63/ruff-0.12.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49ebcaccc2bdad86fd51b7864e3d808aad404aab8df33d469b6e65584656263a", size = 13639381, upload-time = "2025-08-07T19:05:23.423Z" }, + { url = "https://files.pythonhosted.org/packages/81/4e/1b7478b072fcde5161b48f64774d6edd59d6d198e4ba8918d9f4702b8043/ruff-0.12.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ac9c570634b98c71c88cb17badd90f13fc076a472ba6ef1d113d8ed3df109fb", size = 12613271, upload-time = "2025-08-07T19:05:25.507Z" }, + { url = "https://files.pythonhosted.org/packages/e8/67/0c3c9179a3ad19791ef1b8f7138aa27d4578c78700551c60d9260b2c660d/ruff-0.12.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:560e0cd641e45591a3e42cb50ef61ce07162b9c233786663fdce2d8557d99818", size = 12847783, upload-time = "2025-08-07T19:05:28.14Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2a/0b6ac3dd045acf8aa229b12c9c17bb35508191b71a14904baf99573a21bd/ruff-0.12.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:71c83121512e7743fba5a8848c261dcc454cafb3ef2934a43f1b7a4eb5a447ea", size = 11702672, upload-time = "2025-08-07T19:05:30.413Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ee/f9fdc9f341b0430110de8b39a6ee5fa68c5706dc7c0aa940817947d6937e/ruff-0.12.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:de4429ef2ba091ecddedd300f4c3f24bca875d3d8b23340728c3cb0da81072c3", size = 11440626, upload-time = "2025-08-07T19:05:32.492Z" }, + { url = "https://files.pythonhosted.org/packages/89/fb/b3aa2d482d05f44e4d197d1de5e3863feb13067b22c571b9561085c999dc/ruff-0.12.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a2cab5f60d5b65b50fba39a8950c8746df1627d54ba1197f970763917184b161", size = 12462162, upload-time = "2025-08-07T19:05:34.449Z" }, + { url = "https://files.pythonhosted.org/packages/18/9f/5c5d93e1d00d854d5013c96e1a92c33b703a0332707a7cdbd0a4880a84fb/ruff-0.12.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:45c32487e14f60b88aad6be9fd5da5093dbefb0e3e1224131cb1d441d7cb7d46", size = 12913212, upload-time = "2025-08-07T19:05:36.541Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/13/ab9120add1c0e4604c71bfc2e4ef7d63bebece0cfe617013da289539cef8/ruff-0.12.8-py3-none-win32.whl", hash = "sha256:daf3475060a617fd5bc80638aeaf2f5937f10af3ec44464e280a9d2218e720d3", size = 11694382, upload-time = "2025-08-07T19:05:38.468Z" }, + { url = "https://files.pythonhosted.org/packages/f6/dc/a2873b7c5001c62f46266685863bee2888caf469d1edac84bf3242074be2/ruff-0.12.8-py3-none-win_amd64.whl", hash = "sha256:7209531f1a1fcfbe8e46bcd7ab30e2f43604d8ba1c49029bb420b103d0b5f76e", size = 12740482, upload-time = "2025-08-07T19:05:40.391Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/799a1efb8b5abab56e8a9f2a0b72d12bd64bb55815e9476c7d0a2887d2f7/ruff-0.12.8-py3-none-win_arm64.whl", hash = "sha256:c90e1a334683ce41b0e7a04f41790c429bf5073b62c1ae701c9dc5b3d14f0749", size = 11884718, upload-time = "2025-08-07T19:05:42.866Z" }, ] [[package]] From 2875232332b8513e6723acbc0e3944f6179d17ad Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Mon, 11 Aug 2025 11:12:02 -0700 Subject: [PATCH 58/65] Finished test_core.py, found two issues with updating via patch and updating only state to submit --- pyproject.toml | 2 +- test_media_files/another_media_file.txt | 7 -- test_media_files/best_media_file.txt | 3 - test_media_files/media_file.txt | 1 - tests/conf_test.py | 30 +---- tests/elink_service_test.py | 48 +++++-- tests/elinkapi_test.py | 159 ++++++++++++------------ tests/test_core.py | 123 ++++++++++++++++++ tests/test_elink_api.py | 149 ++++++++++++++++++++++ uv.lock | 8 +- 10 files changed, 392 insertions(+), 138 deletions(-) delete mode 100644 test_media_files/another_media_file.txt delete mode 100644 test_media_files/best_media_file.txt delete mode 100644 test_media_files/media_file.txt create mode 100644 tests/test_core.py create mode 100644 tests/test_elink_api.py diff --git a/pyproject.toml b/pyproject.toml index d589bfb..36d9b87 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ maintainers = [ ] requires-python = ">=3.11" dependencies = [ - "elinkapi>=0.4.9", + "elinkapi>=0.5.2", "pydantic>=2.11.7", "pymongo>=4.13.2", "python-dotenv>=1.1.1", diff --git a/test_media_files/another_media_file.txt b/test_media_files/another_media_file.txt deleted file mode 100644 index 9a64dd0..0000000 --- a/test_media_files/another_media_file.txt +++ /dev/null @@ -1,7 +0,0 @@ -WOWWWWWWWWWWWW - - - - - -O \ No newline at end of file diff --git a/test_media_files/best_media_file.txt b/test_media_files/best_media_file.txt deleted file mode 100644 index c708781..0000000 --- a/test_media_files/best_media_file.txt +++ /dev/null @@ -1,3 +0,0 @@ -Not actually the best - -Sue Me \ No newline at end of file diff --git a/test_media_files/media_file.txt b/test_media_files/media_file.txt deleted file mode 100644 index c9d49e9..0000000 --- a/test_media_files/media_file.txt +++ /dev/null @@ -1 +0,0 @@ -This is a media file. The text is here. that is all. 
\ No newline at end of file diff --git a/tests/conf_test.py b/tests/conf_test.py index 4a25e36..2b198d4 100644 --- a/tests/conf_test.py +++ b/tests/conf_test.py @@ -1,6 +1,6 @@ import os import pytest -from elinkapi import Elink, exceptions +from elinkapi import Elink from dotenv import load_dotenv load_dotenv() @@ -25,31 +25,3 @@ def elink_production_client(): """ elink_prod_api_key = os.getenv("elink_api_PRODUCTION_key") return Elink(token=elink_prod_api_key) - - -def test_get_single_record(elink_production_client): - try: - record = elink_production_client.get_single_record(1190959) - assert record.title == "Materials Data on Si by Materials Project" - assert record.osti_id == 1190959 - except exceptions.ForbiddenException as fe: - pytest.fail( - f"Forbidden: Check API key or permissions associated with provided API key. {fe}" - ) - except exceptions.BadRequestException as ve: - pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}") - except Exception as e: - pytest.fail(f"Unexpected error: {e}") - - -def test_query_records(elink_production_client): - try: - elink_production_client.query_records() - except exceptions.ForbiddenException as fe: - pytest.fail( - f"Forbidden: Check API key or permissions associated with provided API key. {fe}" - ) - except exceptions.BadRequestException as ve: - pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}") - except Exception as e: - pytest.fail(f"Unexpected error: {e}") diff --git a/tests/elink_service_test.py b/tests/elink_service_test.py index 3e1c2b1..0a6515d 100644 --- a/tests/elink_service_test.py +++ b/tests/elink_service_test.py @@ -1,7 +1,6 @@ -from elinkapi import Elink +from elinkapi import exceptions from elinkapi.record import RecordResponse import pytest -import os from dotenv import load_dotenv load_dotenv() @@ -16,17 +15,44 @@ # 3. make sure record updates work # 4. deleting records? # 5+. test any other surfaces of the Elink api that we interact with -@pytest.fixture -def elink_review_client(): + + +def test_get_single_record(elink_production_client): + """ + tried to use the production client to retrieve a record. + """ + try: + record = elink_production_client.get_single_record(1190959) + assert record.title == "Materials Data on Si by Materials Project" + assert record.osti_id == 1190959 + except exceptions.ForbiddenException as fe: + pytest.fail( + f"Forbidden: Check API key or permissions associated with provided API key. {fe}" + ) + except exceptions.BadRequestException as ve: + pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}") + except Exception as e: + pytest.fail(f"Unexpected error: {e}") + + +def test_query_records(elink_production_client): """ - tests whether or not the elink review client can be properly retrieved. - returns the elink review client + tests the query functionality of the elinkapi on the production environment """ - elink_review_api_key = os.getenv("elink_review_api_token") - review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") - return Elink(token=elink_review_api_key, target=review_endpoint) + try: + elink_production_client.query_records() + except exceptions.ForbiddenException as fe: + pytest.fail( + f"Forbidden: Check API key or permissions associated with provided API key. {fe}" + ) + except exceptions.BadRequestException as ve: + pytest.fail(f"Bad Request: Possibly incorrect parameters. 
{ve}") + except Exception as e: + pytest.fail(f"Unexpected error: {e}") -def test_elink_query(elink_review_client): - # placeholder, just to verify gh actions until full test suite is done +def test_query_exists(elink_review_client): + """ + tests to see that the query does in fact resolve entries in the form of RecordResponse objects. + """ assert isinstance(next(elink_review_client.query_records()), RecordResponse) diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py index 22e4c3b..9253f97 100644 --- a/tests/elinkapi_test.py +++ b/tests/elinkapi_test.py @@ -1,5 +1,5 @@ import pytest -from elinkapi import Elink, Record, exceptions +from elinkapi import Record, exceptions from elinkapi.record import RecordResponse import sys @@ -8,17 +8,18 @@ from dotenv import load_dotenv from datetime import datetime + sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) from src.mp_cite.models import ( MinimumDARecord, ) # cannot find a good workaround for this with relative importing... +import src.mp_cite.core as core load_dotenv() - valid_save_json = { - "title": "Electron microscope data for photons", - "site_ownership_code": "LLNL", + "title": "Test Reserving DOI - PyTest", + "site_ownership_code": "LBNL-MP", "product_type": "TR", "description": "Hello, from teh other side", } @@ -87,32 +88,10 @@ "publication_date": "2018-02-21", "publication_date_text": "Winter 2012", "released_to_osti_date": "2023-03-03", - "site_ownership_code": "LBNL", + "site_ownership_code": "LBNL-MP", "title": "Sample document title", } - -@pytest.fixture -def elink_review_client(): - """ - tests whether or not the elink review client can be properly retrieved. - returns the elink review client - """ - elink_review_api_key = os.getenv("elink_review_api_token") - review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") - return Elink(token=elink_review_api_key, target=review_endpoint) - - -@pytest.fixture -def elink_production_client(): - """ - tests whether or not the elink review client can be properly retrieved. 
- returns the elink review client - """ - elink_prod_api_key = os.getenv("elink_api_PRODUCTION_key") - return Elink(token=elink_prod_api_key) - - osti_id = "2300069" # osti_id = 2300063 media_id = "1900082" @@ -120,9 +99,6 @@ def elink_production_client(): revision_number = "2" date = datetime.now() state = "save" -file_path = "./test_media_files/media_file.txt" -file_path2 = "./test_media_files/best_media_file.txt" -file_path3 = "./test_media_files/another_media_file.txt" json_responses = [] reserved_osti_id = 1 @@ -148,85 +124,104 @@ def test_post_new_record(elink_review_client) -> RecordResponse: pytest.fail(f"Unexpected error: {e}") -def test_get_new_single_record(test_post_new_record): +def test_get_new_single_record(test_post_new_record, elink_review_client): posted_record = test_post_new_record - elink_review_api_key = os.getenv("elink_review_api_token") - review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") - elink_review_client = Elink(token=elink_review_api_key, target=review_endpoint) - osti_id = test_post_new_record.osti_id single_record = elink_review_client.get_single_record(osti_id) - assert osti_id is not None - assert single_record.title == posted_record.title + try: + assert osti_id is not None + except Exception: + core.delete_osti_record(elink_review_client, osti_id, "Failed Test") + pytest.fail("Assertion failed!") + + try: + assert single_record.title == posted_record.title + except Exception: + core.delete_osti_record(elink_review_client, osti_id, "Failed Test") + pytest.fail("Assertion failed!") # assert single_record.organizations == record_to_post.organizations # this doesn't work because Elink's pydantic model defaults empty identifier to [], where as an empty identifier field is returned as None. # assert single_record.persons == record_to_post.persons # same issue as above^ - assert single_record.publication_date == posted_record.publication_date + + try: + assert single_record.publication_date == posted_record.publication_date + except Exception: + core.delete_osti_record(elink_review_client, osti_id, "Failed Test") + pytest.fail("Assertion failed!") + + core.delete_osti_record(elink_review_client, osti_id, "Completed Test") def test_invalid_query(elink_production_client): list_of_records = elink_production_client.query_records( title="Allo-ballo holla olah" - ) # works, nothing found + ) # works if nothing found assert list_of_records.total_rows == 0 # Reserve a DOI def test_reserve_DOI(elink_review_client): try: - elink_review_client.reserve_doi(Record(**valid_save_json)) # works - naved - except Exception: - print("failed to reserve doi on record") + rr = elink_review_client.reserve_doi(Record(**valid_save_json)) + except exceptions.ForbiddenException as fe: + core.delete_osti_record(elink_review_client, rr.osti_id, "Completed Test") + pytest.fail( + f"Forbidden: Check API key or permissions associated with provided API key. {fe}" + ) + except exceptions.BadRequestException as ve: + core.delete_osti_record(elink_review_client, rr.osti_id, "Completed Test") + pytest.fail(f"Bad Request: Possibly incorrect parameters. 
{ve}") + except Exception as e: + core.delete_osti_record(elink_review_client, rr.osti_id, "Completed Test") + pytest.fail(f"Unexpected error: {e}") + core.delete_osti_record(elink_review_client, rr.osti_id, "Completed Test") -def test_update_record(test_post_new_record): + +def test_update_record(test_post_new_record, elink_review_client): posted_record = test_post_new_record osti_id = posted_record.osti_id - elink_review_api_key = os.getenv("elink_review_api_token") - review_endpoint = os.getenv("ELINK_REVIEW_ENDPOINT") - elink_review_client = Elink(token=elink_review_api_key, target=review_endpoint) - # Update an existing Record - elink_review_client.update_record( - osti_id, - MinimumDARecord(title="Test Updating Record - PyTest"), - "submit", - ) # works + try: + elink_review_client.update_record( + osti_id, + MinimumDARecord(title="Test Updating Record - PyTest"), + "submit", + ) + except exceptions.ForbiddenException as fe: + core.delete_osti_record(elink_review_client, osti_id, "Completed Test") + pytest.fail( + f"Forbidden: Check API key or permissions associated with provided API key. {fe}" + ) + except exceptions.BadRequestException as ve: + core.delete_osti_record(elink_review_client, osti_id, "Completed Test") + pytest.fail(f"Bad Request: Possibly incorrect parameters. {ve}") + except Exception as e: + core.delete_osti_record(elink_review_client, osti_id, "Completed Test") + pytest.fail(f"Unexpected error: {e}") # Get Revision based on revision number - elink_review_client.get_revision_by_number(osti_id, revision_number) # works + try: + elink_review_client.get_revision_by_number(osti_id, revision_number) + except Exception: + core.delete_osti_record(elink_review_client, osti_id, "Completed Test") + pytest.fail( + f"Failed to get revision {revision_number} on record with OSTI ID: {osti_id}" + ) # as of 8/7/2025, elinkapi 0.5.1, these get_all_revisions() calls have stopped working)... 
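+    # the try/except below keeps a failure in get_all_revisions() from leaving
+    # the freshly posted test record behind on the review server: the record is
+    # deleted before the test is failed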
-    # elink_prod_client = Elink(token=os.getenv("elink_api_PRODUCTION_key"))
-    # print(elink_prod_client.get_all_revisions(1758063))
-
-    # Get all RevisionHistory of a Record
-    # revision_history = elink_review_client.get_all_revisions(osti_id)  # works
-    # revision_history[0]
-    # revision_history[-1]
-
-    # # MEDIA ENDPOINTS
-    # # Associate new Media with a Record
-    # posted_media = elink_review_client.post_media(osti_id, file_path, {"title": "Title of the Media media_file.txt"})
-    # posted_media3 = elink_review_client.post_media(osti_id, file_path3, {"title": "Title of the Media media_file.txt"})
-    # media_id = posted_media.media_id
-    # # Replace existing Media on a Record
-    # replaced_media2 = elink_review_client.put_media(osti_id, media_id, file_path2, {"title": "Changed this title now"})
-    # # Get Media associated with OSTI ID
-    # media = elink_review_client.get_media(osti_id)
-    # # Get Media content of a media resource
-    # media_content = elink_review_client.get_media_content(media_id)
-    # # Delete Media with media_id off of a Record
-    # isSuccessDelete = elink_review_client.delete_single_media(osti_id, media_id, reason) #works
-    # assert isSuccessDelete
-    # # Delete all Media associated with a Record
-    # isSuccessAllDelete = elink_review_client.delete_all_media(osti_id, reason)
-    # assert isSuccessAllDelete
-
-    # # Should see that all media has been deleted
-    # final_media = elink_review_client.get_media(osti_id)
-
-    # print("Finished")
+    try:
+        revision_history = elink_review_client.get_all_revisions(osti_id)  # works
+        revision_history[0]
+        revision_history[-1]
+    except Exception:
+        core.delete_osti_record(elink_review_client, osti_id, "Completed Test")
+        pytest.fail(
+            f"Failed to get entire revision history of record with OSTI ID: {osti_id}"
+        )
+
+    core.delete_osti_record(elink_review_client, osti_id, "Test Completed.")
diff --git a/tests/test_core.py
new file mode 100644
index 0000000..2f2be67
--- /dev/null
+++ b/tests/test_core.py
@@ -0,0 +1,123 @@
+import sys
+import os
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
+import src.mp_cite.core as core
+
+import pytest
+
+
+def test_update_existing_osti_record(elink_review_client):
+    record = core.submit_new_osti_record(
+        elink_review_client,
+        new_values={"title": "Test Update Existing OSTI Record | Pytest"},
+    )
+    osti_id = record.osti_id
+    date_old = record.date_metadata_updated
+
+    try:
+        assert record.title == "Test Update Existing OSTI Record | Pytest"
+        assert record.workflow_status == "SO"
+        assert record.description is None
+    except Exception:
+        core.delete_osti_record(
+            elink_review_client, osti_id, "Unexpected submission..."
+        )
+        pytest.fail("Failed to submit new record as expected! Deleting test record...")
+
+    try:
+        # state = "save"
+        # patch = { "description": "This is a new robocrys description" }
+
+        # response = requests.patch(f"{elink_review_client.target}/records/{osti_id}/{state}",
+        #                 headers = {
+        #                     "Authorization" : f"Bearer {elink_review_client.token}",
+        #                     "Content-Type": "application/json"
+        #                 },
+        #                 data=json.dumps(patch))
+        # print("TEST TEST TEST RESPONSE: ", response, "\n", response.text)
+
+        # if response.status_code == 400:
+        #     core.delete_osti_record(elink_review_client, osti_id, "Test Failed!")
+        #     pytest.fail("Failed to patch!")
+
+        record = core.update_existing_osti_record(
+            elink_review_client,
+            osti_id,
+            {"description": "This is a new robocrys description"},
+            new_state="save",
+        )
+
+        # elink_review_client.patch_record(osti_id, { """description""": """This is a new robocrys description""" })
+
+        assert record.workflow_status == "SA"
+        assert record.description == "This is a new robocrys description"
+        assert record.date_metadata_added > date_old
+    except Exception:
+        core.delete_osti_record(elink_review_client, osti_id, "Test Failed!")
+        pytest.fail("Failed to update existing osti record! Deleting test record...")
+
+    core.delete_osti_record(elink_review_client, osti_id, "Test Completed.")
+
+
+def test_submit_new_osti_record(elink_review_client):
+    """
+    Submits a record and then retrieves said submitted record. Checks that each keyword-value pair remains matching, since no updates/patches have been made.
+    """
+
+    record_submit = core.submit_new_osti_record(
+        elink_review_client,
+        new_values={"title": "Test Submit New OSTI Record | Pytest"},
+    )
+    osti_id = record_submit.osti_id
+
+    record_got = elink_review_client.get_single_record(osti_id)
+
+    for keyword, value in record_got:
+        if keyword == "workflow_status" or getattr(record_submit, keyword) == value:
+            # since the workflow_status of submitted osti records changes so quickly in the review environment, we cannot verify that one.
+            pass
+        else:
+            core.delete_osti_record(elink_review_client, osti_id, "Test Completed.")
+            pytest.fail(
+                f"The submitted record's {keyword} does not match the retrieved record's {keyword}: {getattr(record_submit, keyword)} != {value}"
+            )
+
+    core.delete_osti_record(elink_review_client, osti_id, "Test Completed.")
+
+
+def test_update_state_of_osti_record(elink_review_client):
+    record_submit = core.submit_new_osti_record(
+        elink_review_client,
+        new_values={"title": "SUBMIT ONLY Test Updated State OSTI Record | Pytest"},
+    )
+    osti_id = record_submit.osti_id
+
+    record_updated_save = core.update_state_of_osti_record(
+        elink_review_client, osti_id, "save"
+    )
+    try:
+        assert record_updated_save.workflow_status == "SA"
+        assert record_updated_save.revision == 2
+    except Exception:
+        core.delete_osti_record(
+            elink_review_client, osti_id, "Failed to change to saved."
+        )
+        pytest.fail(
+            f"Failed to update to save status: Workflow Status at Fail == {record_updated_save.workflow_status} and revision # == {record_updated_save.revision}"
+        )
+
+    record_updated_save = core.update_state_of_osti_record(
+        elink_review_client, osti_id, "submit"
+    )
+    try:
+        assert record_updated_save.workflow_status == "SO"
+        assert record_updated_save.revision == 3
+    except Exception:
+        # core.delete_osti_record(elink_review_client, osti_id, "Failed to change to submit.")
+        pytest.fail(
+            f"Failed to update to submit status: Workflow Status at Fail == {record_updated_save.workflow_status} and Revision # == {record_updated_save.revision}"
+        )
+    # Need to ask about the desired functionality updating state to submit...
+
+    core.delete_osti_record(elink_review_client, osti_id, "Test Completed.")
diff --git a/tests/test_elink_api.py
new file mode 100644
index 0000000..ed55236
--- /dev/null
+++ b/tests/test_elink_api.py
@@ -0,0 +1,149 @@
+# import os
+# from dotenv import load_dotenv
+
+# from elinkapi import Elink, Record, exceptions
+# import pytest
+
+# from pymongo import MongoClient
+# import pymongo
+
+# load_dotenv()
+
+# atlas_user = os.environ.get("atlas_user")
+# atlas_password = os.environ.get("atlas_password")
+# atlas_host = os.environ.get("atlas_host")
+# mongo_uri = f"mongodb+srv://{atlas_user}:{atlas_password}@{atlas_host}/"
+
+# api = Elink(token=os.environ.get("elink_api_PRODUCTION_key")) # target default is production E-link service.
+
+# # ### Grabbing an existing record
+
+# # # record = api.get_single_record(mp-id) # test for silicon
+
+# # # type(record)
+
+# # # ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record)
+
+# # # print(ELinkGotRecordModel.get_title())
+# # # print(ELinkGotRecordModel.get_site_url())
+# # # print(ELinkGotRecordModel.get_keywords())
+# # # print(ELinkGotRecordModel.get_default_description())
+
+# # # ELinkTestGetRecordModel = TestClass(**record.model_dump())
+
+# # ### Making a new record
+
+# # # with MongoClient(mongo_uri) as client:
+# # #     #get all material_ids and dois from doi collection
+# # #     doi_collection = client["mp_core"]["dois"]
+# # #     materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10))
+# # #     material_ids = [entry["material_id"] for entry in materials_to_update]
+
+# # #     # check # of material_ids from DOI collection vs amount in robocrys
+
+# # #     # get description for material_ids from robocrys collection
+# # #     coll = client["mp_core_blue"]["robocrys"]
+# # #     res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1}))
+
+# # #     # join on material_id
+# # #     for doc in res:
+# # #         mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update))
+# # #         doc["doi"] = mat["doi"]
+
+
+# # # {"material_id": ..., "doi": ..., "description": ...} ->
+# # # Record(
+# # #     template_fields ...,
+# # #     doi: ...,
+# # #     description: ...,
+# # #     fields_where_material_id_makes_sense: ...,
+# # # )
+
+# # # with the client open
+# # with MongoClient(mongo_uri) as client:
+# #     # get all dois from the collection
+# #     doi_collection = client["mp_core"]["dois"]
+# #     materials_to_update = list(
+# #         doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2)
+# #     )
+
+# #     # from the doi collection, grab the material_id and doi of each material
+# #     material_ids = [entry["material_id"] for entry in materials_to_update]
+
+# #     # additionally, gain the osti id
from the doi +# # osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update] + +# # # additionally, grab the description of each material from the robocrys +# # coll = client["mp_core_blue"][ +# # "robocrys" +# # ] # grabs robocrys collection from active database +# # res = list( +# # coll.find( +# # {"material_id": {"$in": material_ids}}, +# # {"_id": 0, "material_id": 1, "description": 1}, +# # ) +# # ) # grabs the material id and description of entries in the collection +# # descriptions = [entry["description"] for entry in res] + +# # # for each material (and its material_id, doi, and osti_id) +# # for i in range(len(materials_to_update)): +# # internal_material_id = material_ids[i] +# # internal_osti_id = osti_ids[i] +# # internal_description = descriptions[i] + +# # # get_single_record(osti_id) +# # record = api.get_single_record(internal_osti_id) + +# # print( +# # f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************" +# # ) +# # print(record) + +# # with the client open +# with MongoClient(mongo_uri) as client: +# # get all dois from the collection +# doi_collection = client["mp_core"]["dois"] +# materials_to_update = list(doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2)) + +# # print( +# # f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" +# # ) +# # print(record) + +# # additionally, gain the osti id from the doi +# osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update] + +# # additionally, grab the description of each material from the robocrys +# coll = client["mp_core_blue"]["robocrys"] # grabs robocrys collection from active database +# res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) # grabs the material id and description of entries in the collection +# descriptions = [entry["description"] for entry in res] + +# # for each material (and its material_id, doi, and osti_id) +# for i in range(len(materials_to_update)): +# internal_material_id = material_ids[i] +# internal_osti_id = osti_ids[i] +# internal_description = descriptions[i] + +# # get_single_record(osti_id) +# record = api.get_single_record(internal_osti_id) + +# print(f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************") +# print(record) + +# if internal_material_id == record.site_unique_id: +# # update description +# record.description = "testTESTtestTESTtest" + +# print(f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") +# print(record) + +# # # post updated record +# # try: +# # saved_record = api.post_new_record(record, "save") +# # except exceptions.BadRequestException as ve: +# # ... +# # # ve.message = "Site Code AAAA is not valid." 
+# # # ve.errors provides more details: +# # # [{"status":"400", "detail":"Site Code AAAA is not valid.", "source":{"pointer":"site_ownership_code"}}] + + diff --git a/uv.lock b/uv.lock index 970822d..178fa79 100644 --- a/uv.lock +++ b/uv.lock @@ -118,7 +118,7 @@ wheels = [ [[package]] name = "elinkapi" -version = "0.5.1" +version = "0.5.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, @@ -126,9 +126,9 @@ dependencies = [ { name = "requests-toolbelt" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/77/11/aa584c66c16a417433a6ac51d232e4cf35a1b5c5a8a747193c73503c8b14/elinkapi-0.5.1.tar.gz", hash = "sha256:33e73648bcb5272e458215698219dcc1c09645f0726798883a2adcdc07f5e00e", size = 51606, upload-time = "2025-08-06T17:49:57.796Z" } +sdist = { url = "https://files.pythonhosted.org/packages/73/39/f652cd398619b6bd9f4a1fc8c9255202c130b36e8c87df1872100f5d744f/elinkapi-0.5.2.tar.gz", hash = "sha256:934134500721aba9e5e37aab232f3c7aa548c87a114ae71f0f0d1b27240dda02", size = 52202, upload-time = "2025-08-08T15:18:00.606Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/28/dec8dfc0a2ddd7ba16a90c29bb7c832f9323b5b2c6bb9699244601bdb289/elinkapi-0.5.1-py3-none-any.whl", hash = "sha256:0ab14ed05a5860480697dba860cb684b77cda042006212e597b5c5ec253df481", size = 37695, upload-time = "2025-08-06T17:49:56.434Z" }, + { url = "https://files.pythonhosted.org/packages/1f/da/27cb5c19f8971c18e9a2b2ebd8db56e8f01e718bf0c44bd7cb632574cb12/elinkapi-0.5.2-py3-none-any.whl", hash = "sha256:eb372efc3e6683a6c95e807f3450b51cfe55daff0c268081c606b975a35ee308", size = 37895, upload-time = "2025-08-08T15:17:59.299Z" }, ] [[package]] @@ -321,7 +321,7 @@ lint = [ [package.metadata] requires-dist = [ - { name = "elinkapi", specifier = ">=0.4.9" }, + { name = "elinkapi", specifier = ">=0.5.2" }, { name = "pydantic", specifier = ">=2.11.7" }, { name = "pymongo", specifier = ">=4.13.2" }, { name = "python-dotenv", specifier = ">=1.1.1" }, From f5bd528f59f2338f40737e9b4998d9cc128a4306 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Tue, 12 Aug 2025 15:13:55 -0700 Subject: [PATCH 59/65] Fixed some issue with importing conf_test prefixes, and also added a temporary debug test for updating states... --- tests/elinkapi_test.py | 1 + tests/test_core.py | 95 ++++++++++++++++++++++++++++++++++++++---- 2 files changed, 89 insertions(+), 7 deletions(-) diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py index 9253f97..410ac1f 100644 --- a/tests/elinkapi_test.py +++ b/tests/elinkapi_test.py @@ -15,6 +15,7 @@ ) # cannot find a good workaround for this with relative importing... 
import src.mp_cite.core as core + load_dotenv() valid_save_json = { diff --git a/tests/test_core.py b/tests/test_core.py index 2f2be67..ac093d2 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -4,6 +4,9 @@ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) import src.mp_cite.core as core + +from elinkapi import Organization, Person, Record + import pytest @@ -41,14 +44,16 @@ def test_update_existing_osti_record(elink_review_client): # core.delete_osti_record(elink_review_client, osti_id, "Test Failed!") # pytest.fail("Failed to patch!") - record = core.update_existing_osti_record( - elink_review_client, - osti_id, - {"description": "This is a new robocrys description"}, - new_state="save", - ) + # record = core.update_existing_osti_record( + # elink_review_client, + # osti_id, + # {"description": "This is a new robocrys description"}, + # new_state="save", + # ) - # elink_review_client.patch_record(osti_id, { """description""": """This is a new robocrys description""" }) + elink_review_client.patch_record( + osti_id, {"description": "This is a new description"} + ) assert record.workflow_status == "SA" assert record.description == "This is a new robocrys description" @@ -69,6 +74,7 @@ def test_submit_new_osti_record(elink_review_client): elink_review_client, new_values={"title": "Test Submit New OSTI Record | Pytest"}, ) + osti_id = record_submit.osti_id record_got = elink_review_client.get_single_record(osti_id) @@ -91,6 +97,7 @@ def test_update_state_of_osti_record(elink_review_client): elink_review_client, new_values={"title": "SUBMIT ONLY Test Updated State OSTI Record | Pytest"}, ) + osti_id = record_submit.osti_id record_updated_save = core.update_state_of_osti_record( @@ -121,3 +128,77 @@ def test_update_state_of_osti_record(elink_review_client): # Need to ask about the desired functionality updating state to submit... 
core.delete_osti_record(elink_review_client, osti_id, "Test Completed.") + + +def test_update_state_debug(elink_review_client): + my_record_dict = { + "product_type": "DA", + "title": "My Dataset", + "organizations": [ + Organization(type="RESEARCHING", name="LBNL Materials Project (LBNL-MP)"), + Organization( + type="SPONSOR", + name="TEST SPONSOR ORG", + identifiers=[{"type": "CN_DOE", "value": "AC02-05CH11231"}], + ), # sponsor org is necessary for submission + ], + "persons": [Person(type="AUTHOR", last_name="Persson")], + "site_ownership_code": "LBNL-MP", + "access_limitations": ["UNL"], + "publication_date": "2025-8-12", + "site_url": "https://next-gen.materialsproject.org/materials", + } + + my_record = Record(**my_record_dict) + + # save in post then update to submit + my_rr = elink_review_client.post_new_record(my_record, "save") + osti_id = my_rr.osti_id + print( + f'After post_new_record(my_record, "save"), my record response workflow_status is {my_rr.workflow_status}' + ) + print(f"Revision Number is {my_rr.revision}") + + got_record = elink_review_client.get_single_record(osti_id) + record_updated_state = elink_review_client.update_record( + osti_id, got_record, "submit" + ) + print( + f'After update_record(osti_id, got_record, "submit"), my record response workflow_status is {record_updated_state.workflow_status}' + ) + print(f"Revision Number is {record_updated_state.revision}\n") + + # submit in post then update to save + record_submit_first = elink_review_client.post_new_record(my_record, "submit") + osti_id = record_submit_first.osti_id + print( + f'Instead of saving, if I post_new_record(my_record, "submit") immediately, then my record response workflow status is {record_submit_first.workflow_status}' + ) + print(f"And revision number is {record_submit_first.revision}") + + got_submitted_record = elink_review_client.get_single_record(osti_id) + record_updated_state = elink_review_client.update_record( + osti_id, got_submitted_record, "submit" + ) + print( + f'After update_record(osti_id, got_record, "save"), my record response workflow_status is {record_updated_state.workflow_status}' + ) + print(f"And the revision number is {record_updated_state.revision}\n") + + # update the workflow_status manually? 
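+    # the block below saves a record, fetches it, sets workflow_status to "SO"
+    # by hand, then calls update_record(..., "submit") to check whether a
+    # manually assigned status survives the round trip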
+    record_to_manual_update = elink_review_client.post_new_record(my_record, "save")
+    osti_id = record_to_manual_update.osti_id
+    print(
+        f'As expected, after post_new_record(my_record, "save"), the workflow status is {record_to_manual_update.workflow_status}'
+    )
+    print(f"And the revision number is {record_to_manual_update.revision}")
+
+    got_record_to_manual_update = elink_review_client.get_single_record(osti_id)
+    got_record_to_manual_update.workflow_status = "SO"
+    record_after_manual_update = elink_review_client.update_record(
+        osti_id, got_record_to_manual_update, "submit"
+    )
+    print(
+        f'After update_record(osti_id, got_record_to_manual_update, "submit"), my record response workflow_status is {record_after_manual_update.workflow_status}'
+    )
+    print(f"Revision Number is {record_after_manual_update.revision}\n")

From 03958b6dc454b82b46305e338726fec79723d420 Mon Sep 17 00:00:00 2001
From: HugoOnghai
Date: Tue, 12 Aug 2025 15:36:31 -0700
Subject: [PATCH 60/65] Fixed an issue with conf_test fixture imports being blocked by pre-commit since they aren't being seen as accessed
---
 tests/elink_service_test.py |  2 ++
 tests/elinkapi_test.py      |  1 +
 tests/test_core.py          | 17 +++++------------
 3 files changed, 8 insertions(+), 12 deletions(-)

diff --git a/tests/elink_service_test.py
index 0a6515d..7de2c01 100644
--- a/tests/elink_service_test.py
+++ b/tests/elink_service_test.py
@@ -3,6 +3,8 @@
 import pytest
 from dotenv import load_dotenv
 
+from tests.conf_test import elink_review_client
+
 load_dotenv()
 
 # TODO: Write tests that verify our usage of Elink is correct,
diff --git a/tests/elinkapi_test.py
index 410ac1f..443b9a0 100644
--- a/tests/elinkapi_test.py
+++ b/tests/elinkapi_test.py
@@ -15,6 +15,7 @@ ) # cannot find a good workaround for this with relative importing...
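+# fixture note: elink_review_client is defined in tests/conf_test.py rather than
+# a conftest.py, so it is imported explicitly below; pre-commit flags the import
+# as unused because pytest injects the fixture by parameter name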
import src.mp_cite.core as core +from tests.conf_test import elink_review_client load_dotenv() diff --git a/tests/test_core.py b/tests/test_core.py index ac093d2..7587f43 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -4,6 +4,7 @@ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) import src.mp_cite.core as core +from tests.conf_test import elink_review_client from elinkapi import Organization, Person, Record @@ -154,18 +155,14 @@ def test_update_state_debug(elink_review_client): # save in post then update to submit my_rr = elink_review_client.post_new_record(my_record, "save") osti_id = my_rr.osti_id - print( - f'After post_new_record(my_record, "save"), my record response workflow_status is {my_rr.workflow_status}' - ) + print(f'After post_new_record(my_record, "save"), my record response workflow_status is {my_rr.workflow_status}') print(f"Revision Number is {my_rr.revision}") got_record = elink_review_client.get_single_record(osti_id) record_updated_state = elink_review_client.update_record( osti_id, got_record, "submit" ) - print( - f'After update_record(osti_id, got_record, "submit"), my record response workflow_status is {record_updated_state.workflow_status}' - ) + print(f'After update_record(osti_id, got_record, "submit"), my record response workflow_status is {record_updated_state.workflow_status}') print(f"Revision Number is {record_updated_state.revision}\n") # submit in post then update to save @@ -177,12 +174,8 @@ def test_update_state_debug(elink_review_client): print(f"And revision number is {record_submit_first.revision}") got_submitted_record = elink_review_client.get_single_record(osti_id) - record_updated_state = elink_review_client.update_record( - osti_id, got_submitted_record, "submit" - ) - print( - f'After update_record(osti_id, got_record, "save"), my record response workflow_status is {record_updated_state.workflow_status}' - ) + record_updated_state = elink_review_client.update_record(osti_id, got_submitted_record, "submit") + print(f'After update_record(osti_id, got_submitted_record, "save"), my record response workflow_status is {record_updated_state.workflow_status}') print(f"And the revision number is {record_updated_state.revision}\n") # update the workflow_status manually? From e2bd3fb68fccdb3ddb5017ee858d0dc0097408bf Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Fri, 15 Aug 2025 16:28:46 -0700 Subject: [PATCH 61/65] Cleaning up for final merge --- src/mp_cite/core.py | 19 +++-- tests/elink_service_test.py | 1 - tests/test_elink_api.py | 149 ------------------------------------ 3 files changed, 12 insertions(+), 157 deletions(-) delete mode 100644 tests/test_elink_api.py diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py index df92ea7..ead6f2a 100644 --- a/src/mp_cite/core.py +++ b/src/mp_cite/core.py @@ -107,16 +107,21 @@ def update_existing_osti_record( Instead, we leave this for the user. """ - # record_on_elink = elinkapi.get_single_record(osti_id) + record_on_elink = elinkapi.get_single_record(osti_id) - # for keyword in new_values: - # setattr(record_on_elink, keyword, new_values[keyword]) + for keyword in new_values: + setattr(record_on_elink, keyword, new_values[keyword]) - # return elinkapi.update_record( - # osti_id, record_on_elink, state="save" - # ) # user should use update_state_of_osti_record to submit instead + if new_state == "submit": + # due to bug in elinkapi version<=0.5.2, new_state passing does not update workflow status. 
+        # for now, it is updated manually to 'SO' (submitted to OSTI), which it should do itself.
+        record_on_elink.workflow_status = "SO"
-
+    return elinkapi.update_record(osti_id, record_on_elink, state=new_state)
+
+    # due to a bug in elinkapi version<=0.5.2, elinkapi.patch_record fails due to bad casting of dict to str.
+    # when the next release fixes this, we can switch back to the call below.
+    # elinkapi.patch_record(osti_id, new_values, new_state)
 
 
 def submit_new_osti_record(
diff --git a/tests/elink_service_test.py b/tests/elink_service_test.py
index 7de2c01..84f761d 100644
--- a/tests/elink_service_test.py
+++ b/tests/elink_service_test.py
@@ -3,7 +3,6 @@
 import pytest
 from dotenv import load_dotenv
 
-from tests.conf_test import elink_review_client
 
 load_dotenv()
 
diff --git a/tests/test_elink_api.py b/tests/test_elink_api.py
deleted file mode 100644
index ed55236..0000000
--- a/tests/test_elink_api.py
+++ /dev/null
@@ -1,149 +0,0 @@
-# import os
-# from dotenv import load_dotenv
-
-# from elinkapi import Elink, Record, exceptions
-# import pytest
-
-# from pymongo import MongoClient
-# import pymongo
-
-# load_dotenv()
-
-# atlas_user = os.environ.get("atlas_user")
-# atlas_password = os.environ.get("atlas_password")
-# atlas_host = os.environ.get("atlas_host")
-# mongo_uri = f"mongodb+srv://{atlas_user}:{atlas_password}@{atlas_host}/"
-
-# api = Elink(token=os.environ.get("elink_api_PRODUCTION_key"))  # target default is production E-link service.
-
-# # ### Grabbing an existing record
-
-# # # record = api.get_single_record(mp-id)  # test for silicon
-
-# # # type(record)
-
-# # # ELinkGotRecordModel = ELinkGetResponseModel.from_elinkapi_record(record)
-
-# # # print(ELinkGotRecordModel.get_title())
-# # # print(ELinkGotRecordModel.get_site_url())
-# # # print(ELinkGotRecordModel.get_keywords())
-# # # print(ELinkGotRecordModel.get_default_description())
-
-# # # ELinkTestGetRecordModel = TestClass(**record.model_dump())
-
-# # ### Making a new record
-
-# # # with MongoClient(mongo_uri) as client:
-# # #     #get all material_ids and dois from doi collection
-# # #     doi_collection = client["mp_core"]["dois"]
-# # #     materials_to_update = list(doi_collection.find({}, {"_id": 0, "material_id": 1, "doi": 1}, limit=10))
-# # #     material_ids = [entry["material_id"] for entry in materials_to_update]
-
-# # #     # check # of material_ids from DOI collection vs amount in robocrys
-
-# # #     # get description for material_ids from robocrys collection
-# # #     coll = client["mp_core_blue"]["robocrys"]
-# # #     res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1}))
-
-# # #     # join on material_id
-# # #     for doc in res:
-# # #         mat = next(filter(lambda x: x["material_id"] == doc["material_id"], materials_to_update))
-# # #         doc["doi"] = mat["doi"]
-
-
-# # # {"material_id": ..., "doi": ..., "description": ...} ->
-# # # Record(
-# # #     template_fields ...,
-# # #     doi: ...,
-# # #     description: ...,
-# # #     fields_where_material_id_makes_sense: ...,
-# # # )
-
-# # # with the client open
-# # with MongoClient(mongo_uri) as client:
-# #     # get all dois from the collection
-# #     doi_collection = client["mp_core"]["dois"]
-# #     materials_to_update = list(
-# #         doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2)
-# #     )
-
-# #     # from the doi collection, grab the material_id and doi of each material
-# #     material_ids = [entry["material_id"] for entry in materials_to_update]
-
-# #     # additionally, gain the
osti id from the doi -# # osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update] - -# # # additionally, grab the description of each material from the robocrys -# # coll = client["mp_core_blue"][ -# # "robocrys" -# # ] # grabs robocrys collection from active database -# # res = list( -# # coll.find( -# # {"material_id": {"$in": material_ids}}, -# # {"_id": 0, "material_id": 1, "description": 1}, -# # ) -# # ) # grabs the material id and description of entries in the collection -# # descriptions = [entry["description"] for entry in res] - -# # # for each material (and its material_id, doi, and osti_id) -# # for i in range(len(materials_to_update)): -# # internal_material_id = material_ids[i] -# # internal_osti_id = osti_ids[i] -# # internal_description = descriptions[i] - -# # # get_single_record(osti_id) -# # record = api.get_single_record(internal_osti_id) - -# # print( -# # f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************" -# # ) -# # print(record) - -# # with the client open -# with MongoClient(mongo_uri) as client: -# # get all dois from the collection -# doi_collection = client["mp_core"]["dois"] -# materials_to_update = list(doi_collection.find({}, {"_id": 0, "doi": 1, "material_id": 1}, limit=2)) - -# # print( -# # f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" -# # ) -# # print(record) - -# # additionally, gain the osti id from the doi -# osti_ids = [entry["doi"].split("10.17188/")[1] for entry in materials_to_update] - -# # additionally, grab the description of each material from the robocrys -# coll = client["mp_core_blue"]["robocrys"] # grabs robocrys collection from active database -# res = list(coll.find({"material_id": {"$in": material_ids}}, {"_id": 0, "material_id": 1, "description": 1})) # grabs the material id and description of entries in the collection -# descriptions = [entry["description"] for entry in res] - -# # for each material (and its material_id, doi, and osti_id) -# for i in range(len(materials_to_update)): -# internal_material_id = material_ids[i] -# internal_osti_id = osti_ids[i] -# internal_description = descriptions[i] - -# # get_single_record(osti_id) -# record = api.get_single_record(internal_osti_id) - -# print(f"\n \n \nPrinting what is currently on ELINK for {internal_material_id}*****************************************") -# print(record) - -# if internal_material_id == record.site_unique_id: -# # update description -# record.description = "testTESTtestTESTtest" - -# print(f"\n \n \nPrinting record for {internal_material_id}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") -# print(record) - -# # # post updated record -# # try: -# # saved_record = api.post_new_record(record, "save") -# # except exceptions.BadRequestException as ve: -# # ... -# # # ve.message = "Site Code AAAA is not valid." 
-# #     # ve.errors provides more details:
-# #     # [{"status":"400", "detail":"Site Code AAAA is not valid.", "source":{"pointer":"site_ownership_code"}}]
-
-

From ddf57de4018852cf80cb38ce156d9c144f8127b3 Mon Sep 17 00:00:00 2001
From: HugoOnghai
Date: Fri, 15 Aug 2025 16:51:25 -0700
Subject: [PATCH 62/65] Rebased to newly synced master

---
 src/mp_cite/core.py | 16 ----------------
 1 file changed, 16 deletions(-)

diff --git a/src/mp_cite/core.py b/src/mp_cite/core.py
index ead6f2a..0a8edb0 100644
--- a/src/mp_cite/core.py
+++ b/src/mp_cite/core.py
@@ -1,4 +1,3 @@
-<<<<<<< HEAD
 from typing import Literal, TypeAlias
 
 import requests
@@ -8,21 +7,6 @@
 from pymongo import MongoClient
 
 from mp_cite.models import MinimumDARecord
-=======
-from typing import TypeAlias
-
-from elinkapi import Elink
-from elinkapi.record import RecordResponse
-from pymongo import MongoClient
-
-import requests
-from elinkapi.utils import Validation
-
-
-from .models import MinimumDARecord
-
-from typing import Literal
->>>>>>> 0346ff4 (Removed extra files in tests/ and updated email on pyproject.toml)
 
 OstiID: TypeAlias = int

From d8d3e5b50cd66b32138f20a6a9604ab2fa47945b Mon Sep 17 00:00:00 2001
From: HugoOnghai
Date: Fri, 15 Aug 2025 17:36:03 -0700
Subject: [PATCH 63/65] added switching-states behavior test; it seems like it
 was fixed on the backend already, though

---
 tests/elink_service_test.py | 95 ++++++++++++++++++++++++++++++-----
 tests/elinkapi_test.py      |  3 +-
 tests/test_core.py          | 78 ++++--------------------------
 3 files changed, 92 insertions(+), 84 deletions(-)

diff --git a/tests/elink_service_test.py b/tests/elink_service_test.py
index 84f761d..346e264 100644
--- a/tests/elink_service_test.py
+++ b/tests/elink_service_test.py
@@ -1,22 +1,17 @@
-from elinkapi import exceptions
+from elinkapi import exceptions, Person, Organization, Record
 from elinkapi.record import RecordResponse
 import pytest
 from dotenv import load_dotenv
+from .conf_test import elink_production_client, elink_review_client
 
-load_dotenv()
-
-# TODO: Write tests that verify our usage of Elink is correct,
-# and make sure any upstream breaking changes get caught
-# here when version upgrades happen
-
+
+import sys
+import os
 
-# 1. general query logic + params that we use regularly?
-# 2. make sure we can submit a correctly templated dataset submission
-# 3. make sure record updates work
-# 4. deleting records?
-# 5+. test any other surfaces of the Elink api that we interact with
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
+import src.mp_cite.core as core
 
+load_dotenv()
 
 def test_get_single_record(elink_production_client):
     """
@@ -57,3 +52,79 @@ def test_query_exists(elink_review_client):
     tests to see that the query does in fact resolve entries in the form of RecordResponse objects.
     """
     assert isinstance(next(elink_review_client.query_records()), RecordResponse)
+
+def test_switching_states(elink_review_client):
+    """
+    This test repeats the tests done to demonstrate the unexpected behavior of passing "save" and "submit" states that was present in elinkapi 0.5.2.
+    """
+    my_record_dict = {
+        "product_type": "DA",
+        "title": "My Dataset",
+        "organizations": [
+            Organization(type="RESEARCHING", name="LBNL Materials Project (LBNL-MP)"),
+            Organization(
+                type="SPONSOR",
+                name="TEST SPONSOR ORG",
+                identifiers=[{"type": "CN_DOE", "value": "AC02-05CH11231"}],
+            ),  # sponsor org is necessary for submission
+        ],
+        "persons": [Person(type="AUTHOR", last_name="Persson")],
+        "site_ownership_code": "LBNL-MP",
+        "access_limitations": ["UNL"],
+        "publication_date": "2025-8-12",
+        "site_url": "https://next-gen.materialsproject.org/materials",
+    }
+
+    my_record = Record(**my_record_dict)
+
+    # save in post then update to submit
+    try:
+        my_rr = elink_review_client.post_new_record(my_record, "save")
+        osti_id = my_rr.osti_id
+        assert my_rr.workflow_status == 'SA'
+        assert my_rr.revision == 1
+
+        got_record = elink_review_client.get_single_record(osti_id)
+        record_updated_state = elink_review_client.update_record(
+            osti_id, got_record, "submit"
+        )
+        assert record_updated_state.workflow_status == 'SO'
+        assert record_updated_state.revision == 2
+        core.delete_osti_record(elink_review_client, osti_id, "Test completed!")
+    except Exception as e:
+        core.delete_osti_record(elink_review_client, osti_id, "Test failed!")
+        pytest.fail(f"Test failed: {e}")
+
+
+    # submit in post then update to save
+    try:
+        record_submit_first = elink_review_client.post_new_record(my_record, "submit")
+        osti_id = record_submit_first.osti_id
+        assert record_submit_first.workflow_status == 'SO'
+        assert record_submit_first.revision == 1
+
+        got_submitted_record = elink_review_client.get_single_record(osti_id)
+        record_updated_state = elink_review_client.update_record(osti_id, got_submitted_record, "save")
+        assert record_updated_state.workflow_status == 'SA'  # record was submitted but switched to save, so should be 'SA'
+        assert record_updated_state.revision == 2
+        core.delete_osti_record(elink_review_client, osti_id, "Test completed!")
+    except Exception as e:
+        core.delete_osti_record(elink_review_client, osti_id, "Test failed!")
+        pytest.fail(f"Test failed: {e}")
+
+    # update the workflow_status manually?
+    try:
+        record_to_manual_update = elink_review_client.post_new_record(my_record, "save")
+        osti_id = record_to_manual_update.osti_id
+        assert record_to_manual_update.workflow_status == 'SA'
+        assert record_to_manual_update.revision == 1
+
+        got_record_to_manual_update = elink_review_client.get_single_record(osti_id)
+        got_record_to_manual_update.workflow_status = 'SO'
+        record_after_manual_update = elink_review_client.update_record(osti_id, got_record_to_manual_update, "submit")
+        assert record_after_manual_update.workflow_status == 'SO'
+        assert record_after_manual_update.revision == 2
+        core.delete_osti_record(elink_review_client, osti_id, "Test completed!")
+    except Exception as e:
+        core.delete_osti_record(elink_review_client, osti_id, "Test failed!")
+        pytest.fail(f"Test failed: {e}")
\ No newline at end of file
diff --git a/tests/elinkapi_test.py b/tests/elinkapi_test.py
index 443b9a0..0d2efd7 100644
--- a/tests/elinkapi_test.py
+++ b/tests/elinkapi_test.py
@@ -8,6 +8,7 @@
 from dotenv import load_dotenv
 from datetime import datetime
+from .conf_test import elink_production_client, elink_review_client
 
 sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
 from src.mp_cite.models import (
     MinimumDARecord,
 )  # cannot find a good workaround for this with relative importing...
import src.mp_cite.core as core -from tests.conf_test import elink_review_client - load_dotenv() valid_save_json = { diff --git a/tests/test_core.py b/tests/test_core.py index 7587f43..8b14e0b 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -4,9 +4,7 @@ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) import src.mp_cite.core as core -from tests.conf_test import elink_review_client - -from elinkapi import Organization, Person, Record +from .conf_test import elink_review_client import pytest @@ -52,6 +50,7 @@ def test_update_existing_osti_record(elink_review_client): # new_state="save", # ) + # elinkapi<=0.5.2 this fails the test elink_review_client.patch_record( osti_id, {"description": "This is a new description"} ) @@ -81,8 +80,13 @@ def test_submit_new_osti_record(elink_review_client): record_got = elink_review_client.get_single_record(osti_id) for keyword, value in record_got: - if keyword == "workflow_status" or getattr(record_submit, keyword) == value: + if ( + keyword == "workflow_status" + or keyword == "audit_logs" + or getattr(record_submit, keyword) == value + ): # since the workflow_status of submitted osti records changes so quickly in the review environment, we cannot verify that one. + # audit logs updated during and after submission so it will change. pass else: core.delete_osti_record(elink_review_client, osti_id, "Test Completed.") @@ -129,69 +133,3 @@ def test_update_state_of_osti_record(elink_review_client): # Need to ask about the desired functionality updating state to submit... core.delete_osti_record(elink_review_client, osti_id, "Test Completed.") - - -def test_update_state_debug(elink_review_client): - my_record_dict = { - "product_type": "DA", - "title": "My Dataset", - "organizations": [ - Organization(type="RESEARCHING", name="LBNL Materials Project (LBNL-MP)"), - Organization( - type="SPONSOR", - name="TEST SPONSOR ORG", - identifiers=[{"type": "CN_DOE", "value": "AC02-05CH11231"}], - ), # sponsor org is necessary for submission - ], - "persons": [Person(type="AUTHOR", last_name="Persson")], - "site_ownership_code": "LBNL-MP", - "access_limitations": ["UNL"], - "publication_date": "2025-8-12", - "site_url": "https://next-gen.materialsproject.org/materials", - } - - my_record = Record(**my_record_dict) - - # save in post then update to submit - my_rr = elink_review_client.post_new_record(my_record, "save") - osti_id = my_rr.osti_id - print(f'After post_new_record(my_record, "save"), my record response workflow_status is {my_rr.workflow_status}') - print(f"Revision Number is {my_rr.revision}") - - got_record = elink_review_client.get_single_record(osti_id) - record_updated_state = elink_review_client.update_record( - osti_id, got_record, "submit" - ) - print(f'After update_record(osti_id, got_record, "submit"), my record response workflow_status is {record_updated_state.workflow_status}') - print(f"Revision Number is {record_updated_state.revision}\n") - - # submit in post then update to save - record_submit_first = elink_review_client.post_new_record(my_record, "submit") - osti_id = record_submit_first.osti_id - print( - f'Instead of saving, if I post_new_record(my_record, "submit") immediately, then my record response workflow status is {record_submit_first.workflow_status}' - ) - print(f"And revision number is {record_submit_first.revision}") - - got_submitted_record = elink_review_client.get_single_record(osti_id) - record_updated_state = elink_review_client.update_record(osti_id, got_submitted_record, "submit") - 
print(f'After update_record(osti_id, got_submitted_record, "save"), my record response workflow_status is {record_updated_state.workflow_status}') - print(f"And the revision number is {record_updated_state.revision}\n") - - # update the workflow_status manually? - record_to_manual_update = elink_review_client.post_new_record(my_record, "save") - osti_id = record_to_manual_update.osti_id - print( - f'As expected, after post_new_record(my_record, "save"), the workflow status is {record_to_manual_update.workflow_status}' - ) - print(f"And the revision number is {record_to_manual_update.revision}") - - got_record_to_manual_update = elink_review_client.get_single_record(osti_id) - got_record_to_manual_update.workflow_status = "SO" - record_after_manual_update = elink_review_client.update_record( - osti_id, got_record_to_manual_update, "submit" - ) - print( - f'After update_record(osti_id, got_record_to_manual_update, "submit"), my record response workflow_status is {record_after_manual_update.workflow_status}' - ) - print(f"Revision Number is {record_after_manual_update.revision}\n") From c5889b7750172a1abbfd6aa4c7174ce080b6d9f1 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Fri, 15 Aug 2025 17:39:26 -0700 Subject: [PATCH 64/65] Cleaned up .gitignore and re-locked uv lock --- .gitignore | 10 +--- uv.lock | 160 +++++++++++++++++++++++++++-------------------------- 2 files changed, 86 insertions(+), 84 deletions(-) diff --git a/.gitignore b/.gitignore index c84ff87..8241d4c 100644 --- a/.gitignore +++ b/.gitignore @@ -210,10 +210,6 @@ __marimo__/ # Streamlit .streamlit/secrets.toml -# json files for storing production records -*.json -.env - -/json_pages -/notebooks -/test_json_pages +json_pages/ +notebooks/ +test_json_pages/ \ No newline at end of file diff --git a/uv.lock b/uv.lock index 178fa79..0bc5a7b 100644 --- a/uv.lock +++ b/uv.lock @@ -31,50 +31,55 @@ wheels = [ [[package]] name = "charset-normalizer" -version = "3.4.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, - { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, - { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, - { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, - { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, - { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, - { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, - { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, - { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, - { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, - { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, - { url = 
"https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, - { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, - { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, - { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, - { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, - { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, - { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, - { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, - { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, - { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = 
"sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, - { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, - { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, - { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, - { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, - { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, - { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, - { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, - { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, - { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, - { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, - { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, - { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, - { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +version = "3.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, + { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, + { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, + { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, + { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, + { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, + { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, + { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, + { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, + { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = 
"sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, ] [[package]] @@ -133,11 +138,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.18.0" +version = "3.19.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } +sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, + { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, ] [[package]] @@ -154,11 +159,11 @@ wheels = [ [[package]] name = "identify" -version = "2.6.12" +version = "2.6.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ca/ffbabe3635bb839aa36b3a893c91a9b0d368cb4d8073e03a12896970af82/identify-2.6.13.tar.gz", hash = "sha256:da8d6c828e773620e13bfa86ea601c5a5310ba4bcd65edf378198b56a1f9fb32", size = 99243, upload-time = "2025-08-09T19:35:00.6Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = "2025-05-23T20:37:51.495Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ce/461b60a3ee109518c055953729bf9ed089a04db895d47e95444071dcdef2/identify-2.6.13-py2.py3-none-any.whl", hash = "sha256:60381139b3ae39447482ecc406944190f690d4a2997f2584062089848361b33b", size = 99153, upload-time = "2025-08-09T19:34:59.1Z" }, ] [[package]] @@ -432,7 +437,7 @@ wheels = [ [[package]] name = "pre-commit" -version = "4.2.0" +version = "4.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cfgv" }, @@ -441,9 +446,9 @@ dependencies = [ { name = "pyyaml" }, { name = "virtualenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424, upload-time = "2025-03-18T21:35:20.987Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/29/7cf5bbc236333876e4b41f56e06857a87937ce4bf91e117a6991a2dbb02a/pre_commit-4.3.0.tar.gz", hash = 
"sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16", size = 193792, upload-time = "2025-08-09T18:56:14.651Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" }, + { url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" }, ] [[package]] @@ -695,27 +700,28 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4b/da/5bd7565be729e86e1442dad2c9a364ceeff82227c2dece7c29697a9795eb/ruff-0.12.8.tar.gz", hash = "sha256:4cb3a45525176e1009b2b64126acf5f9444ea59066262791febf55e40493a033", size = 5242373, upload-time = "2025-08-07T19:05:47.268Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/1e/c843bfa8ad1114fab3eb2b78235dda76acd66384c663a4e0415ecc13aa1e/ruff-0.12.8-py3-none-linux_armv6l.whl", hash = "sha256:63cb5a5e933fc913e5823a0dfdc3c99add73f52d139d6cd5cc8639d0e0465513", size = 11675315, upload-time = "2025-08-07T19:05:06.15Z" }, - { url = "https://files.pythonhosted.org/packages/24/ee/af6e5c2a8ca3a81676d5480a1025494fd104b8896266502bb4de2a0e8388/ruff-0.12.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9a9bbe28f9f551accf84a24c366c1aa8774d6748438b47174f8e8565ab9dedbc", size = 12456653, upload-time = "2025-08-07T19:05:09.759Z" }, - { url = "https://files.pythonhosted.org/packages/99/9d/e91f84dfe3866fa648c10512904991ecc326fd0b66578b324ee6ecb8f725/ruff-0.12.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2fae54e752a3150f7ee0e09bce2e133caf10ce9d971510a9b925392dc98d2fec", size = 11659690, upload-time = "2025-08-07T19:05:12.551Z" }, - { url = "https://files.pythonhosted.org/packages/fe/ac/a363d25ec53040408ebdd4efcee929d48547665858ede0505d1d8041b2e5/ruff-0.12.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0acbcf01206df963d9331b5838fb31f3b44fa979ee7fa368b9b9057d89f4a53", size = 11896923, upload-time = "2025-08-07T19:05:14.821Z" }, - { url = "https://files.pythonhosted.org/packages/58/9f/ea356cd87c395f6ade9bb81365bd909ff60860975ca1bc39f0e59de3da37/ruff-0.12.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae3e7504666ad4c62f9ac8eedb52a93f9ebdeb34742b8b71cd3cccd24912719f", size = 11477612, upload-time = "2025-08-07T19:05:16.712Z" }, - { url = "https://files.pythonhosted.org/packages/1a/46/92e8fa3c9dcfd49175225c09053916cb97bb7204f9f899c2f2baca69e450/ruff-0.12.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb82efb5d35d07497813a1c5647867390a7d83304562607f3579602fa3d7d46f", size = 13182745, upload-time = "2025-08-07T19:05:18.709Z" }, - { url = "https://files.pythonhosted.org/packages/5e/c4/f2176a310f26e6160deaf661ef60db6c3bb62b7a35e57ae28f27a09a7d63/ruff-0.12.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dbea798fc0065ad0b84a2947b0aff4233f0cb30f226f00a2c5850ca4393de609", size = 14206885, upload-time = "2025-08-07T19:05:21.025Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/9d/98e162f3eeeb6689acbedbae5050b4b3220754554526c50c292b611d3a63/ruff-0.12.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49ebcaccc2bdad86fd51b7864e3d808aad404aab8df33d469b6e65584656263a", size = 13639381, upload-time = "2025-08-07T19:05:23.423Z" }, - { url = "https://files.pythonhosted.org/packages/81/4e/1b7478b072fcde5161b48f64774d6edd59d6d198e4ba8918d9f4702b8043/ruff-0.12.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ac9c570634b98c71c88cb17badd90f13fc076a472ba6ef1d113d8ed3df109fb", size = 12613271, upload-time = "2025-08-07T19:05:25.507Z" }, - { url = "https://files.pythonhosted.org/packages/e8/67/0c3c9179a3ad19791ef1b8f7138aa27d4578c78700551c60d9260b2c660d/ruff-0.12.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:560e0cd641e45591a3e42cb50ef61ce07162b9c233786663fdce2d8557d99818", size = 12847783, upload-time = "2025-08-07T19:05:28.14Z" }, - { url = "https://files.pythonhosted.org/packages/4e/2a/0b6ac3dd045acf8aa229b12c9c17bb35508191b71a14904baf99573a21bd/ruff-0.12.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:71c83121512e7743fba5a8848c261dcc454cafb3ef2934a43f1b7a4eb5a447ea", size = 11702672, upload-time = "2025-08-07T19:05:30.413Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ee/f9fdc9f341b0430110de8b39a6ee5fa68c5706dc7c0aa940817947d6937e/ruff-0.12.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:de4429ef2ba091ecddedd300f4c3f24bca875d3d8b23340728c3cb0da81072c3", size = 11440626, upload-time = "2025-08-07T19:05:32.492Z" }, - { url = "https://files.pythonhosted.org/packages/89/fb/b3aa2d482d05f44e4d197d1de5e3863feb13067b22c571b9561085c999dc/ruff-0.12.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a2cab5f60d5b65b50fba39a8950c8746df1627d54ba1197f970763917184b161", size = 12462162, upload-time = "2025-08-07T19:05:34.449Z" }, - { url = "https://files.pythonhosted.org/packages/18/9f/5c5d93e1d00d854d5013c96e1a92c33b703a0332707a7cdbd0a4880a84fb/ruff-0.12.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:45c32487e14f60b88aad6be9fd5da5093dbefb0e3e1224131cb1d441d7cb7d46", size = 12913212, upload-time = "2025-08-07T19:05:36.541Z" }, - { url = "https://files.pythonhosted.org/packages/71/13/ab9120add1c0e4604c71bfc2e4ef7d63bebece0cfe617013da289539cef8/ruff-0.12.8-py3-none-win32.whl", hash = "sha256:daf3475060a617fd5bc80638aeaf2f5937f10af3ec44464e280a9d2218e720d3", size = 11694382, upload-time = "2025-08-07T19:05:38.468Z" }, - { url = "https://files.pythonhosted.org/packages/f6/dc/a2873b7c5001c62f46266685863bee2888caf469d1edac84bf3242074be2/ruff-0.12.8-py3-none-win_amd64.whl", hash = "sha256:7209531f1a1fcfbe8e46bcd7ab30e2f43604d8ba1c49029bb420b103d0b5f76e", size = 12740482, upload-time = "2025-08-07T19:05:40.391Z" }, - { url = "https://files.pythonhosted.org/packages/cb/5c/799a1efb8b5abab56e8a9f2a0b72d12bd64bb55815e9476c7d0a2887d2f7/ruff-0.12.8-py3-none-win_arm64.whl", hash = "sha256:c90e1a334683ce41b0e7a04f41790c429bf5073b62c1ae701c9dc5b3d14f0749", size = 11884718, upload-time = "2025-08-07T19:05:42.866Z" }, +version = "0.12.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/45/2e403fa7007816b5fbb324cb4f8ed3c7402a927a0a0cb2b6279879a8bfdc/ruff-0.12.9.tar.gz", hash = "sha256:fbd94b2e3c623f659962934e52c2bea6fc6da11f667a427a368adaf3af2c866a", size = 5254702, upload-time = "2025-08-14T16:08:55.2Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ad/20/53bf098537adb7b6a97d98fcdebf6e916fcd11b2e21d15f8c171507909cc/ruff-0.12.9-py3-none-linux_armv6l.whl", hash = "sha256:fcebc6c79fcae3f220d05585229463621f5dbf24d79fdc4936d9302e177cfa3e", size = 11759705, upload-time = "2025-08-14T16:08:12.968Z" }, + { url = "https://files.pythonhosted.org/packages/20/4d/c764ee423002aac1ec66b9d541285dd29d2c0640a8086c87de59ebbe80d5/ruff-0.12.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aed9d15f8c5755c0e74467731a007fcad41f19bcce41cd75f768bbd687f8535f", size = 12527042, upload-time = "2025-08-14T16:08:16.54Z" }, + { url = "https://files.pythonhosted.org/packages/8b/45/cfcdf6d3eb5fc78a5b419e7e616d6ccba0013dc5b180522920af2897e1be/ruff-0.12.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5b15ea354c6ff0d7423814ba6d44be2807644d0c05e9ed60caca87e963e93f70", size = 11724457, upload-time = "2025-08-14T16:08:18.686Z" }, + { url = "https://files.pythonhosted.org/packages/72/e6/44615c754b55662200c48bebb02196dbb14111b6e266ab071b7e7297b4ec/ruff-0.12.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d596c2d0393c2502eaabfef723bd74ca35348a8dac4267d18a94910087807c53", size = 11949446, upload-time = "2025-08-14T16:08:21.059Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d1/9b7d46625d617c7df520d40d5ac6cdcdf20cbccb88fad4b5ecd476a6bb8d/ruff-0.12.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b15599931a1a7a03c388b9c5df1bfa62be7ede6eb7ef753b272381f39c3d0ff", size = 11566350, upload-time = "2025-08-14T16:08:23.433Z" }, + { url = "https://files.pythonhosted.org/packages/59/20/b73132f66f2856bc29d2d263c6ca457f8476b0bbbe064dac3ac3337a270f/ruff-0.12.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d02faa2977fb6f3f32ddb7828e212b7dd499c59eb896ae6c03ea5c303575756", size = 13270430, upload-time = "2025-08-14T16:08:25.837Z" }, + { url = "https://files.pythonhosted.org/packages/a2/21/eaf3806f0a3d4c6be0a69d435646fba775b65f3f2097d54898b0fd4bb12e/ruff-0.12.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:17d5b6b0b3a25259b69ebcba87908496e6830e03acfb929ef9fd4c58675fa2ea", size = 14264717, upload-time = "2025-08-14T16:08:27.907Z" }, + { url = "https://files.pythonhosted.org/packages/d2/82/1d0c53bd37dcb582b2c521d352fbf4876b1e28bc0d8894344198f6c9950d/ruff-0.12.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72db7521860e246adbb43f6ef464dd2a532ef2ef1f5dd0d470455b8d9f1773e0", size = 13684331, upload-time = "2025-08-14T16:08:30.352Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2f/1c5cf6d8f656306d42a686f1e207f71d7cebdcbe7b2aa18e4e8a0cb74da3/ruff-0.12.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a03242c1522b4e0885af63320ad754d53983c9599157ee33e77d748363c561ce", size = 12739151, upload-time = "2025-08-14T16:08:32.55Z" }, + { url = "https://files.pythonhosted.org/packages/47/09/25033198bff89b24d734e6479e39b1968e4c992e82262d61cdccaf11afb9/ruff-0.12.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fc83e4e9751e6c13b5046d7162f205d0a7bac5840183c5beebf824b08a27340", size = 12954992, upload-time = "2025-08-14T16:08:34.816Z" }, + { url = "https://files.pythonhosted.org/packages/52/8e/d0dbf2f9dca66c2d7131feefc386523404014968cd6d22f057763935ab32/ruff-0.12.9-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:881465ed56ba4dd26a691954650de6ad389a2d1fdb130fe51ff18a25639fe4bb", size = 12899569, upload-time = "2025-08-14T16:08:36.852Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/bd/b614d7c08515b1428ed4d3f1d4e3d687deffb2479703b90237682586fa66/ruff-0.12.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:43f07a3ccfc62cdb4d3a3348bf0588358a66da756aa113e071b8ca8c3b9826af", size = 11751983, upload-time = "2025-08-14T16:08:39.314Z" }, + { url = "https://files.pythonhosted.org/packages/58/d6/383e9f818a2441b1a0ed898d7875f11273f10882f997388b2b51cb2ae8b5/ruff-0.12.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:07adb221c54b6bba24387911e5734357f042e5669fa5718920ee728aba3cbadc", size = 11538635, upload-time = "2025-08-14T16:08:41.297Z" }, + { url = "https://files.pythonhosted.org/packages/20/9c/56f869d314edaa9fc1f491706d1d8a47747b9d714130368fbd69ce9024e9/ruff-0.12.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f5cd34fabfdea3933ab85d72359f118035882a01bff15bd1d2b15261d85d5f66", size = 12534346, upload-time = "2025-08-14T16:08:43.39Z" }, + { url = "https://files.pythonhosted.org/packages/bd/4b/d8b95c6795a6c93b439bc913ee7a94fda42bb30a79285d47b80074003ee7/ruff-0.12.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:f6be1d2ca0686c54564da8e7ee9e25f93bdd6868263805f8c0b8fc6a449db6d7", size = 13017021, upload-time = "2025-08-14T16:08:45.889Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c1/5f9a839a697ce1acd7af44836f7c2181cdae5accd17a5cb85fcbd694075e/ruff-0.12.9-py3-none-win32.whl", hash = "sha256:cc7a37bd2509974379d0115cc5608a1a4a6c4bff1b452ea69db83c8855d53f93", size = 11734785, upload-time = "2025-08-14T16:08:48.062Z" }, + { url = "https://files.pythonhosted.org/packages/fa/66/cdddc2d1d9a9f677520b7cfc490d234336f523d4b429c1298de359a3be08/ruff-0.12.9-py3-none-win_amd64.whl", hash = "sha256:6fb15b1977309741d7d098c8a3cb7a30bc112760a00fb6efb7abc85f00ba5908", size = 12840654, upload-time = "2025-08-14T16:08:50.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/fd/669816bc6b5b93b9586f3c1d87cd6bc05028470b3ecfebb5938252c47a35/ruff-0.12.9-py3-none-win_arm64.whl", hash = "sha256:63c8c819739d86b96d500cce885956a1a48ab056bbcbc61b747ad494b2485089", size = 11949623, upload-time = "2025-08-14T16:08:52.233Z" }, ] [[package]] @@ -759,16 +765,16 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.33.1" +version = "20.34.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/60/4f20960df6c7b363a18a55ab034c8f2bcd5d9770d1f94f9370ec104c1855/virtualenv-20.33.1.tar.gz", hash = "sha256:1b44478d9e261b3fb8baa5e74a0ca3bc0e05f21aa36167bf9cbf850e542765b8", size = 6082160, upload-time = "2025-08-05T16:10:55.605Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/14/37fcdba2808a6c615681cd216fecae00413c9dab44fb2e57805ecf3eaee3/virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a", size = 6003808, upload-time = "2025-08-13T14:24:07.464Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ca/ff/ded57ac5ff40a09e6e198550bab075d780941e0b0f83cbeabd087c59383a/virtualenv-20.33.1-py3-none-any.whl", hash = "sha256:07c19bc66c11acab6a5958b815cbcee30891cd1c2ccf53785a28651a0d8d8a67", size = 6060362, upload-time = "2025-08-05T16:10:52.81Z" }, + { url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026", size = 5983279, upload-time = 
"2025-08-13T14:24:05.111Z" }, ] [[package]] From a87a14882c4ccd8cc213415226294a008c769eb6 Mon Sep 17 00:00:00 2001 From: HugoOnghai Date: Fri, 15 Aug 2025 17:48:44 -0700 Subject: [PATCH 65/65] Fixed unused exception variable e --- tests/elink_service_test.py | 35 +++++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/tests/elink_service_test.py b/tests/elink_service_test.py index 346e264..938a507 100644 --- a/tests/elink_service_test.py +++ b/tests/elink_service_test.py @@ -13,6 +13,7 @@ load_dotenv() + def test_get_single_record(elink_production_client): """ tried to use the production client to retrieve a record. @@ -53,6 +54,7 @@ def test_query_exists(elink_review_client): """ assert isinstance(next(elink_review_client.query_records()), RecordResponse) + def test_switching_states(elink_review_client): """ This test repeats the tests done to demonstrate unexpected behavior or passing "save" and "submit" states present in Elinkapi 0.5.2. @@ -81,34 +83,37 @@ def test_switching_states(elink_review_client): try: my_rr = elink_review_client.post_new_record(my_record, "save") osti_id = my_rr.osti_id - assert my_rr.workflow_status == 'SA' + assert my_rr.workflow_status == "SA" assert my_rr.revision == 1 got_record = elink_review_client.get_single_record(osti_id) record_updated_state = elink_review_client.update_record( osti_id, got_record, "submit" ) - assert record_updated_state.workflow_status == 'SO' + assert record_updated_state.workflow_status == "SO" assert record_updated_state.revision == 2 core.delete_osti_record(elink_review_client, osti_id, "Test completed!") - except Exception as e: + except Exception: core.delete_osti_record(elink_review_client, osti_id, "Test failed!") pytest.fail("Test failed!") - # submit in post then update to save try: record_submit_first = elink_review_client.post_new_record(my_record, "submit") osti_id = record_submit_first.osti_id - assert record_submit_first.workflow_status == 'SO' + assert record_submit_first.workflow_status == "SO" assert record_submit_first.revision == 1 got_submitted_record = elink_review_client.get_single_record(osti_id) - record_updated_state = elink_review_client.update_record(osti_id, got_submitted_record, "save") - assert record_updated_state.workflow_status == 'SA' # record was submitted but switched to save, so should be 'SA' + record_updated_state = elink_review_client.update_record( + osti_id, got_submitted_record, "save" + ) + assert ( + record_updated_state.workflow_status == "SA" + ) # record was submitted but switched to save, so should be 'SA' assert record_updated_state.revision == 2 core.delete_osti_record(elink_review_client, osti_id, "Test completed!") - except Exception as e: + except Exception: core.delete_osti_record(elink_review_client, osti_id, "Test failed!") pytest.fail("Test failed!") @@ -116,15 +121,17 @@ def test_switching_states(elink_review_client): try: record_to_manual_update = elink_review_client.post_new_record(my_record, "save") osti_id = record_to_manual_update.osti_id - assert record_to_manual_update.workflow_status == 'SA' + assert record_to_manual_update.workflow_status == "SA" assert record_to_manual_update.revision == 1 got_record_to_manual_update = elink_review_client.get_single_record(osti_id) - got_record_to_manual_update.workflow_status = 'SO' - record_after_manual_update = elink_review_client.update_record(osti_id, got_record_to_manual_update, "submit") - assert record_after_manual_update.workflow_status == 'SO' + 
got_record_to_manual_update.workflow_status = "SO" + record_after_manual_update = elink_review_client.update_record( + osti_id, got_record_to_manual_update, "submit" + ) + assert record_after_manual_update.workflow_status == "SO" assert record_after_manual_update.revision == 2 core.delete_osti_record(elink_review_client, osti_id, "Test completed!") - except Exception as e: + except Exception: core.delete_osti_record(elink_review_client, osti_id, "Test failed!") - pytest.fail("Test failed!") \ No newline at end of file + pytest.fail("Test failed!")
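
Note on the idiom in patch 65/65: when the bound exception object is never read, linters flag the binding as an unused local (e.g. pyflakes/Ruff F841), so `except Exception as e:` becomes a bare `except Exception:`. A minimal sketch of the resulting cleanup-and-fail pattern, reusing names from the surrounding suite (`elink_review_client`, `my_record`, and the `core.delete_osti_record` helper are assumed fixtures/utilities from these tests, passed in here to keep the sketch self-contained):

    import pytest


    def roundtrip_record(elink_review_client, my_record, core):
        # Post in the "save" state; any failure below must still clean up the record.
        my_rr = elink_review_client.post_new_record(my_record, "save")
        osti_id = my_rr.osti_id
        try:
            assert my_rr.workflow_status == "SA"
            core.delete_osti_record(elink_review_client, osti_id, "Test completed!")
        except Exception:  # no "as e": the exception object is never inspected
            core.delete_osti_record(elink_review_client, osti_id, "Test failed!")
            pytest.fail("Test failed!")

If the failure detail were ever wanted in the test report, the binding would earn its keep again, e.g. `except Exception as exc:` followed by `pytest.fail(f"Test failed: {exc}")`.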