Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -877,7 +877,7 @@ fun uniffi_superposition_core_fn_method_providercache_eval_config(`ptr`: Pointer
): RustBuffer.ByValue
fun uniffi_superposition_core_fn_method_providercache_filter_config(`ptr`: Pointer,`dimensionData`: RustBuffer.ByValue,`prefix`: RustBuffer.ByValue,uniffi_out_err: UniffiRustCallStatus,
): RustBufferConfig.ByValue
fun uniffi_superposition_core_fn_method_providercache_filter_experiment(`ptr`: Pointer,`dimensionData`: RustBuffer.ByValue,`prefix`: RustBuffer.ByValue,uniffi_out_err: UniffiRustCallStatus,
fun uniffi_superposition_core_fn_method_providercache_filter_experiment(`ptr`: Pointer,`dimensionData`: RustBuffer.ByValue,`prefix`: RustBuffer.ByValue,`partialApply`: Byte,uniffi_out_err: UniffiRustCallStatus,
): RustBuffer.ByValue
fun uniffi_superposition_core_fn_method_providercache_get_applicable_variants(`ptr`: Pointer,`dimensionData`: RustBuffer.ByValue,`prefix`: RustBuffer.ByValue,`targetingKey`: RustBuffer.ByValue,uniffi_out_err: UniffiRustCallStatus,
): RustBuffer.ByValue
Expand Down Expand Up @@ -1047,7 +1047,7 @@ private fun uniffiCheckApiChecksums(lib: IntegrityCheckingUniffiLib) {
if (lib.uniffi_superposition_core_checksum_method_providercache_filter_config() != 21761.toShort()) {
throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
}
if (lib.uniffi_superposition_core_checksum_method_providercache_filter_experiment() != 60575.toShort()) {
if (lib.uniffi_superposition_core_checksum_method_providercache_filter_experiment() != 31120.toShort()) {
throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
}
if (lib.uniffi_superposition_core_checksum_method_providercache_get_applicable_variants() != 12269.toShort()) {
Expand Down Expand Up @@ -1228,6 +1228,29 @@ public object FfiConverterUByte: FfiConverter<UByte, Byte> {
}
}

/**
 * Converts between a Kotlin `Boolean` and the single-byte FFI encoding
 * (0 = false, any non-zero value = true).
 *
 * @suppress
 */
public object FfiConverterBoolean: FfiConverter<Boolean, Byte> {
    // Any non-zero byte lifts to true.
    override fun lift(value: Byte): Boolean = value.toInt() != 0

    override fun read(buf: ByteBuffer): Boolean = lift(buf.get())

    // true lowers to 1, false to 0.
    override fun lower(value: Boolean): Byte = if (value) 1.toByte() else 0.toByte()

    // A boolean always occupies exactly one byte on the wire.
    override fun allocationSize(value: Boolean) = 1UL

    override fun write(value: Boolean, buf: ByteBuffer) {
        buf.put(lower(value))
    }
}

/**
* @suppress
*/
Expand Down Expand Up @@ -1390,7 +1413,7 @@ public interface ProviderCacheInterface {

fun `filterConfig`(`dimensionData`: Map<kotlin.String, kotlin.String>?, `prefix`: List<kotlin.String>?): Config

fun `filterExperiment`(`dimensionData`: Map<kotlin.String, kotlin.String>?, `prefix`: List<kotlin.String>?): ExperimentConfig
fun `filterExperiment`(`dimensionData`: Map<kotlin.String, kotlin.String>?, `prefix`: List<kotlin.String>?, `partialApply`: kotlin.Boolean): ExperimentConfig

fun `getApplicableVariants`(`dimensionData`: Map<kotlin.String, kotlin.String>?, `prefix`: List<kotlin.String>?, `targetingKey`: kotlin.String): List<kotlin.String>

Expand Down Expand Up @@ -1517,12 +1540,12 @@ open class ProviderCache: Disposable, AutoCloseable, ProviderCacheInterface



@Throws(OperationException::class)override fun `filterExperiment`(`dimensionData`: Map<kotlin.String, kotlin.String>?, `prefix`: List<kotlin.String>?): ExperimentConfig {
@Throws(OperationException::class)override fun `filterExperiment`(`dimensionData`: Map<kotlin.String, kotlin.String>?, `prefix`: List<kotlin.String>?, `partialApply`: kotlin.Boolean): ExperimentConfig {
return FfiConverterTypeExperimentConfig.lift(
callWithPointer {
uniffiRustCallWithError(OperationException) { _status ->
UniffiLib.INSTANCE.uniffi_superposition_core_fn_method_providercache_filter_experiment(
it, FfiConverterOptionalMapStringString.lower(`dimensionData`),FfiConverterOptionalSequenceString.lower(`prefix`),_status)
it, FfiConverterOptionalMapStringString.lower(`dimensionData`),FfiConverterOptionalSequenceString.lower(`prefix`),FfiConverterBoolean.lower(`partialApply`),_status)
}
}
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -511,7 +511,7 @@ def _uniffi_check_api_checksums(lib):
raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
if lib.uniffi_superposition_core_checksum_method_providercache_filter_config() != 21761:
raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
if lib.uniffi_superposition_core_checksum_method_providercache_filter_experiment() != 60575:
if lib.uniffi_superposition_core_checksum_method_providercache_filter_experiment() != 31120:
raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
if lib.uniffi_superposition_core_checksum_method_providercache_get_applicable_variants() != 12269:
raise InternalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
Expand Down Expand Up @@ -661,6 +661,7 @@ class _UniffiForeignFutureStructVoid(ctypes.Structure):
ctypes.c_void_p,
_UniffiRustBuffer,
_UniffiRustBuffer,
ctypes.c_int8,
ctypes.POINTER(_UniffiRustCallStatus),
)
_UniffiLib.uniffi_superposition_core_fn_method_providercache_filter_experiment.restype = _UniffiRustBuffer
Expand Down Expand Up @@ -1068,6 +1069,27 @@ def read(buf):
def write(value, buf):
buf.write_u8(value)

class _UniffiConverterBool:
@classmethod
def check_lower(cls, value):
return not not value

@classmethod
def lower(cls, value):
return 1 if value else 0

@staticmethod
def lift(value):
return value != 0

@classmethod
def read(cls, buf):
return cls.lift(buf.read_u8())

@classmethod
def write(cls, value, buf):
buf.write_u8(value)

class _UniffiConverterString:
@staticmethod
def check_lower(value):
Expand Down Expand Up @@ -1747,7 +1769,7 @@ def eval_config(self, query_data: "dict[str, str]",merge_strategy: "MergeStrateg
raise NotImplementedError
def filter_config(self, dimension_data: "typing.Optional[dict[str, str]]",prefix: "typing.Optional[typing.List[str]]"):
raise NotImplementedError
def filter_experiment(self, dimension_data: "typing.Optional[dict[str, str]]",prefix: "typing.Optional[typing.List[str]]"):
def filter_experiment(self, dimension_data: "typing.Optional[dict[str, str]]",prefix: "typing.Optional[typing.List[str]]",partial_apply: "bool"):
raise NotImplementedError
def get_applicable_variants(self, dimension_data: "typing.Optional[dict[str, str]]",prefix: "typing.Optional[typing.List[str]]",targeting_key: "str"):
raise NotImplementedError
Expand Down Expand Up @@ -1816,15 +1838,18 @@ def filter_config(self, dimension_data: "typing.Optional[dict[str, str]]",prefix



def filter_experiment(self, dimension_data: "typing.Optional[dict[str, str]]",prefix: "typing.Optional[typing.List[str]]") -> "ExperimentConfig":
def filter_experiment(self, dimension_data: "typing.Optional[dict[str, str]]",prefix: "typing.Optional[typing.List[str]]",partial_apply: "bool") -> "ExperimentConfig":
_UniffiConverterOptionalMapStringString.check_lower(dimension_data)

_UniffiConverterOptionalSequenceString.check_lower(prefix)

_UniffiConverterBool.check_lower(partial_apply)

return _UniffiConverterTypeExperimentConfig.lift(
_uniffi_rust_call_with_error(_UniffiConverterTypeOperationError,_UniffiLib.uniffi_superposition_core_fn_method_providercache_filter_experiment,self._uniffi_clone_pointer(),
_UniffiConverterOptionalMapStringString.lower(dimension_data),
_UniffiConverterOptionalSequenceString.lower(prefix))
_UniffiConverterOptionalSequenceString.lower(prefix),
_UniffiConverterBool.lower(partial_apply))
)


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,6 @@ class SuperpositionDataSource(ABC):
while consumers interact with this unified interface.
"""

@abstractmethod
async def fetch_config(
self,
if_modified_since: Optional[datetime] = None,
Expand All @@ -90,7 +89,7 @@ async def fetch_config(
Returns:
FetchResponse with ConfigData or NotModified status.
"""
pass
return await self.fetch_filtered_config(if_modified_since=if_modified_since)

@abstractmethod
async def fetch_filtered_config(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,17 +100,24 @@ def __init__(
else:
raise ValueError(f"Unsupported file format: {file_path}")

async def _fetch_config_with_filters(
async def fetch_filtered_config(
self,
context: Optional[Dict[str, Any]] = None,
prefix_filter: Optional[List[str]] = None,
if_modified_since: Optional[datetime] = None,
) -> FetchResponse[ConfigData]:
"""Fetch configuration from file, applying filters and 304 Not Modified logic.
"""Fetch configuration, optionally filtered.

Note: File-based filtering is not efficient; consider using HttpDataSource
for production configurations that need filtering.

Args:
context: Optional context applied during parsing/filtering.
Copy link

Copilot AI Apr 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Docstring says context is “ignored”, but this method passes context into _parse_config_file(...) (and the helper JSON-encodes it for the FFI call). Update the docstring to reflect that context is actually applied during parsing/filtering.

Suggested change
context: Optional context for filtering (ignored).
context: Optional context applied during parsing/filtering.

Copilot uses AI. Check for mistakes.
prefix_filter: Optional key prefixes to include.
if_modified_since: Timestamp for 304 Not Modified check.

Returns:
FetchResponse with ConfigData or NotModified status.
"""
if if_modified_since is not None:
logger.debug("FileDataSource: ignoring if_modified_since, always reading fresh from file")
Expand All @@ -131,41 +138,6 @@ async def _fetch_config_with_filters(
logger.error(f"Failed to fetch config from {self.file_path}: {e}")
raise

async def fetch_config(
self,
if_modified_since: Optional[datetime] = None,
) -> FetchResponse[ConfigData]:
"""Fetch configuration from file.

Args:
if_modified_since: Timestamp for 304 Not Modified check.

Returns:
FetchResponse with ConfigData or NotModified status.
"""
return await self._fetch_config_with_filters(if_modified_since=if_modified_since)

async def fetch_filtered_config(
self,
context: Optional[Dict[str, Any]] = None,
prefix_filter: Optional[List[str]] = None,
if_modified_since: Optional[datetime] = None,
) -> FetchResponse[ConfigData]:
"""Fetch configuration, optionally filtered.

Note: File-based filtering is not efficient; consider using HttpDataSource
for production configurations that need filtering.

Args:
context: Optional context for filtering (ignored).
prefix_filter: Optional key prefixes to include.
if_modified_since: Timestamp for 304 Not Modified check.

Returns:
FetchResponse with ConfigData or NotModified status.
"""
return await self._fetch_config_with_filters(context, prefix_filter, if_modified_since)

async def fetch_active_experiments(
self,
if_modified_since: Optional[datetime] = None,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,12 +62,22 @@ def _create_client(self) -> Superposition:
# Create Superposition client
return Superposition(config=sdk_config)

async def _fetch_config_with_filters(
async def fetch_filtered_config(
self,
context: Optional[Dict[str, Any]] = None,
prefix_filter: Optional[List[str]] = None,
if_modified_since: Optional[datetime] = None
if_modified_since: Optional[datetime] = None,
) -> FetchResponse[ConfigData]:
"""Fetch resolved configuration filtered by context and prefixes.

Args:
context: Optional context for filtering.
prefix_filter: Optional list of key prefixes to include.
if_modified_since: Optional timestamp for 304 Not Modified check.

Returns:
FetchResponse with ConfigData or NotModified status.
"""
try:
context = {k: Document(v) for k, v in context.items()} if context else None
response = await self.client.get_config(
Expand All @@ -89,38 +99,6 @@ async def _fetch_config_with_filters(
except Exception as e:
raise e

async def fetch_config(
self,
if_modified_since: Optional[datetime] = None,
) -> FetchResponse:
"""Fetch full resolved configuration.

Args:
if_modified_since: Optional timestamp for 304 Not Modified check.

Returns:
FetchResponse with ConfigData or NotModified status.
"""
return await self._fetch_config_with_filters(if_modified_since=if_modified_since)

async def fetch_filtered_config(
self,
context: Optional[Dict[str, Any]] = None,
prefix_filter: Optional[List[str]] = None,
if_modified_since: Optional[datetime] = None,
) -> FetchResponse:
"""Fetch resolved configuration filtered by context and prefixes.

Args:
context: Optional context for filtering.
prefix_filter: Optional list of key prefixes to include.
if_modified_since: Optional timestamp for 304 Not Modified check.

Returns:
FetchResponse with ConfigData or NotModified status.
"""
return await self._fetch_config_with_filters(context, prefix_filter, if_modified_since)

async def _fetch_filtered_experiment(
self,
context: Optional[Dict[str, Any]] = None,
Expand Down Expand Up @@ -173,7 +151,7 @@ async def fetch_candidate_active_experiments(
context: Optional[Dict[str, Any]] = None,
prefix_filter: Optional[List[str]] = None,
if_modified_since: Optional[datetime] = None,
) -> FetchResponse:
) -> FetchResponse[ExperimentData]:
"""Fetch active experiments with candidate conditions.

Args:
Expand Down
Loading
Loading