From 8c49e5747a663384fa408d3b847273c32429530f Mon Sep 17 00:00:00 2001
From: Shahrad
Date: Mon, 21 Jul 2025 14:35:01 -0400
Subject: [PATCH 1/3] increase preference inclusion probs

---
 aif_gen/cli/commands/generate.py            | 2 +-
 aif_gen/generate/mappers/response_mapper.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/aif_gen/cli/commands/generate.py b/aif_gen/cli/commands/generate.py
index a516cab8..1a7ec143 100644
--- a/aif_gen/cli/commands/generate.py
+++ b/aif_gen/cli/commands/generate.py
@@ -7,7 +7,7 @@
 
 import click
 import openai
-import yaml # type: ignore
+import yaml
 
 from aif_gen.generate.engine import generate_continual_dataset
 from aif_gen.util.hf import upload_to_hf
diff --git a/aif_gen/generate/mappers/response_mapper.py b/aif_gen/generate/mappers/response_mapper.py
index e67cf37e..6de6eacb 100644
--- a/aif_gen/generate/mappers/response_mapper.py
+++ b/aif_gen/generate/mappers/response_mapper.py
@@ -15,8 +15,8 @@ class ResponseMapper(ResponseMapperBase):
     """
 
     NUM_PREFERENCE_AXES_SAMPLES: int = 3
-    PREFERENCE_INCLUSION_PROB_POS: float = 0.5
-    PREFERENCE_INCLUSION_PROB_NEG: float = 0.5
+    PREFERENCE_INCLUSION_PROB_POS: float = 0.9
+    PREFERENCE_INCLUSION_PROB_NEG: float = 0.9
 
     def __init__(self, suffix_context: Optional[str] = None) -> None:
         self._suffix_context = suffix_context

From 213b62b2e3011d493cd375f171d98c6dd40761c6 Mon Sep 17 00:00:00 2001
From: Shahrad
Date: Tue, 22 Jul 2025 11:35:53 -0400
Subject: [PATCH 2/3] resolve caching NoneType bug

---
 aif_gen/generate/engine.py                  | 12 +++++++++---
 aif_gen/generate/mappers/response_mapper.py |  4 ++--
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/aif_gen/generate/engine.py b/aif_gen/generate/engine.py
index 5187a701..beab95c2 100644
--- a/aif_gen/generate/engine.py
+++ b/aif_gen/generate/engine.py
@@ -271,9 +271,11 @@ async def _generate_sample(
     meta_prompt_nonce = f'{prompt_idx}'
     async with async_semaphore:
+        output = None
         if cache is not None:
             output = await cache.get(meta_prompt, nonce=meta_prompt_nonce)
-        else:
+
+        if output is None:
             # Cache miss or no cache - make API call
             response = await client.chat.completions.create(
                 model=model_name,
                 messages=[{'role': 'user', 'content': meta_prompt}],
@@ -306,9 +308,11 @@ async def _generate_sample(
     task_prompt = task_prompt1 + task_prompt2
     async with async_semaphore:
+        output = None
         if cache is not None:
             output = await cache.get(task_prompt1 + task_prompt2)
-        else:
+
+        if output is None:
             # Cache miss or no cache - make API calls
             futures = []
             for response_prompt in [task_prompt1, task_prompt2]:
                 coro = client.chat.completions.create(
@@ -346,9 +350,11 @@ async def _generate_sample(
     else:
         task_prompt = response_mapper.generate_prompt(task, prompt)
     async with async_semaphore:
+        output = None
         if cache is not None:
             output = await cache.get(task_prompt)
-        else:
+
+        if output is None:
             # Cache miss or no cache - make API call
             response = await client.chat.completions.create(
                 model=model_name,
                 messages=[{'role': 'user', 'content': task_prompt}],
diff --git a/aif_gen/generate/mappers/response_mapper.py b/aif_gen/generate/mappers/response_mapper.py
index 6de6eacb..c5923520 100644
--- a/aif_gen/generate/mappers/response_mapper.py
+++ b/aif_gen/generate/mappers/response_mapper.py
@@ -15,8 +15,8 @@ class ResponseMapper(ResponseMapperBase):
     """
 
     NUM_PREFERENCE_AXES_SAMPLES: int = 3
-    PREFERENCE_INCLUSION_PROB_POS: float = 0.9
-    PREFERENCE_INCLUSION_PROB_NEG: float = 0.9
+    PREFERENCE_INCLUSION_PROB_POS: float = 0.8
+    PREFERENCE_INCLUSION_PROB_NEG: float = 0.8
 
     def __init__(self, suffix_context: Optional[str] = None) -> None:
         self._suffix_context = suffix_context
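Note on PATCH 2/3: the three engine hunks all apply the same fix. The old shape, `if cache is not None: ... else: <API call>`, skipped the API call whenever a cache object existed but returned None for an unseen key, so `output` stayed None downstream. The sketch below shows the corrected fall-through pattern in isolation; the `InMemoryCache` class, its `put` method, and the stubbed API call are illustrative stand-ins, not code from this repository.

import asyncio
from typing import Dict, Optional


class InMemoryCache:
    """Illustrative stand-in for the engine's cache; get() returns None on a miss."""

    def __init__(self) -> None:
        self._store: Dict[str, str] = {}

    async def get(self, key: str) -> Optional[str]:
        return self._store.get(key)

    async def put(self, key: str, value: str) -> None:
        self._store[key] = value


async def generate(prompt: str, cache: Optional[InMemoryCache]) -> str:
    output: Optional[str] = None
    if cache is not None:
        output = await cache.get(prompt)

    if output is None:
        # Cache miss or no cache - make the (stubbed) API call.
        output = f'response for: {prompt}'
        if cache is not None:
            await cache.put(prompt, output)
    return output


if __name__ == '__main__':
    cache = InMemoryCache()
    print(asyncio.run(generate('hello', cache)))  # miss -> stubbed call, then cached
    print(asyncio.run(generate('hello', cache)))  # hit  -> served from the cache

Hoisting `output = None` above the cache lookup leaves a single code path for both "no cache" and "cache miss", which is why the identical three-line change repeats in all three hunks.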
From 25df292b8775916f5857cf4b36cc2824715b3258 Mon Sep 17 00:00:00 2001
From: Shahrad
Date: Tue, 22 Jul 2025 12:06:41 -0400
Subject: [PATCH 3/3] fix typing error

---
 aif_gen/cli/commands/generate.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/aif_gen/cli/commands/generate.py b/aif_gen/cli/commands/generate.py
index 1a7ec143..a516cab8 100644
--- a/aif_gen/cli/commands/generate.py
+++ b/aif_gen/cli/commands/generate.py
@@ -7,7 +7,7 @@
 
 import click
 import openai
-import yaml
+import yaml # type: ignore
 
 from aif_gen.generate.engine import generate_continual_dataset
 from aif_gen.util.hf import upload_to_hf
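Note on the constants touched in PATCH 1/3 and 2/3: the diffs only change PREFERENCE_INCLUSION_PROB_POS and PREFERENCE_INCLUSION_PROB_NEG, and ResponseMapper's internals are not shown here, so the following is only an assumed sketch of how such inclusion probabilities are commonly consumed: a per-axis Bernoulli gate on whether a sampled preference axis is spelled out in the generated prompt. Every name apart from the two constants is hypothetical.

import random
from typing import List

# Values after PATCH 2/3; only these two names come from the repository.
PREFERENCE_INCLUSION_PROB_POS: float = 0.8
PREFERENCE_INCLUSION_PROB_NEG: float = 0.8


def sample_preference_clauses(pos_axes: List[str], neg_axes: List[str]) -> List[str]:
    """Hypothetical gate: keep each preference axis with the configured probability."""
    clauses: List[str] = []
    for axis in pos_axes:
        if random.random() < PREFERENCE_INCLUSION_PROB_POS:
            clauses.append(f'The preferred response should be {axis}.')
    for axis in neg_axes:
        if random.random() < PREFERENCE_INCLUSION_PROB_NEG:
            clauses.append(f'The rejected response should be {axis}.')
    return clauses


if __name__ == '__main__':
    print(sample_preference_clauses(['concise', 'factual'], ['verbose']))

Under this reading, raising the probabilities from 0.5 to 0.9 and then settling on 0.8 means most generated prompts carry explicit preference guidance rather than only about half of them.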