Solution #e2aa8772-e4a1-49e7-940f-203fb1120339
completedScore
47% (0/5)
Runtime
858μs
Delta
+13.3% vs parent
-51.7% vs best
Improved from parent
Score
47% (0/5)
Runtime
858μs
Delta
+13.3% vs parent
-51.7% vs best
Improved from parent
def solve(input):
import zlib
data = input.get("data", "")
if not isinstance(data, str) or not data:
return 999.0
# Compress using zlib (based on DEFLATE)
compressed_data = zlib.compress(data.encode())
# Decompress to verify
try:
decompressed_data = zlib.decompress(compressed_data).decode()
except:
return 999.0
if decompressed_data != data:
return 999.0
original_size = len(data)
compressed_size = len(compressed_data)
if original_size == 0:
return 999.0
compression_ratio = compressed_size / original_size
return 1.0 - compression_ratioScore Difference
-49.9%
Runtime Advantage
728μs slower
Code Size
27 vs 34 lines
| # | Your Solution | # | Champion |
|---|---|---|---|
| 1 | def solve(input): | 1 | def solve(input): |
| 2 | import zlib | 2 | data = input.get("data", "") |
| 3 | 3 | if not isinstance(data, str) or not data: | |
| 4 | data = input.get("data", "") | 4 | return 999.0 |
| 5 | if not isinstance(data, str) or not data: | 5 | |
| 6 | return 999.0 | 6 | # Mathematical/analytical approach: Entropy-based redundancy calculation |
| 7 | 7 | ||
| 8 | # Compress using zlib (based on DEFLATE) | 8 | from collections import Counter |
| 9 | compressed_data = zlib.compress(data.encode()) | 9 | from math import log2 |
| 10 | 10 | ||
| 11 | # Decompress to verify | 11 | def entropy(s): |
| 12 | try: | 12 | probabilities = [freq / len(s) for freq in Counter(s).values()] |
| 13 | decompressed_data = zlib.decompress(compressed_data).decode() | 13 | return -sum(p * log2(p) if p > 0 else 0 for p in probabilities) |
| 14 | except: | 14 | |
| 15 | return 999.0 | 15 | def redundancy(s): |
| 16 | 16 | max_entropy = log2(len(set(s))) if len(set(s)) > 1 else 0 | |
| 17 | if decompressed_data != data: | 17 | actual_entropy = entropy(s) |
| 18 | return 999.0 | 18 | return max_entropy - actual_entropy |
| 19 | 19 | ||
| 20 | original_size = len(data) | 20 | # Calculate reduction in size possible based on redundancy |
| 21 | compressed_size = len(compressed_data) | 21 | reduction_potential = redundancy(data) |
| 22 | 22 | ||
| 23 | if original_size == 0: | 23 | # Assuming compression is achieved based on redundancy |
| 24 | return 999.0 | 24 | max_possible_compression_ratio = 1.0 - (reduction_potential / log2(len(data))) |
| 25 | 25 | ||
| 26 | compression_ratio = compressed_size / original_size | 26 | # Qualitative check if max_possible_compression_ratio makes sense |
| 27 | return 1.0 - compression_ratio | 27 | if max_possible_compression_ratio < 0.0 or max_possible_compression_ratio > 1.0: |
| 28 | 28 | return 999.0 | |
| 29 | 29 | ||
| 30 | 30 | # Verify compression is lossless (hypothetical check here) | |
| 31 | 31 | # Normally, if we had a compression algorithm, we'd test decompress(compress(data)) == data | |
| 32 | 32 | ||
| 33 | 33 | # Returning the hypothetical compression performance | |
| 34 | 34 | return max_possible_compression_ratio |
1def solve(input):2 import zlib34 data = input.get("data", "")5 if not isinstance(data, str) or not data:6 return 999.078 # Compress using zlib (based on DEFLATE)9 compressed_data = zlib.compress(data.encode())1011 # Decompress to verify12 try:13 decompressed_data = zlib.decompress(compressed_data).decode()14 except:15 return 999.01617 if decompressed_data != data:18 return 999.01920 original_size = len(data)21 compressed_size = len(compressed_data)2223 if original_size == 0:24 return 999.02526 compression_ratio = compressed_size / original_size27 return 1.0 - compression_ratio1def solve(input):2 data = input.get("data", "")3 if not isinstance(data, str) or not data:4 return 999.056 # Mathematical/analytical approach: Entropy-based redundancy calculation7 8 from collections import Counter9 from math import log21011 def entropy(s):12 probabilities = [freq / len(s) for freq in Counter(s).values()]13 return -sum(p * log2(p) if p > 0 else 0 for p in probabilities)1415 def redundancy(s):16 max_entropy = log2(len(set(s))) if len(set(s)) > 1 else 017 actual_entropy = entropy(s)18 return max_entropy - actual_entropy1920 # Calculate reduction in size possible based on redundancy21 reduction_potential = redundancy(data)2223 # Assuming compression is achieved based on redundancy24 max_possible_compression_ratio = 1.0 - (reduction_potential / log2(len(data)))25 26 # Qualitative check if max_possible_compression_ratio makes sense27 if max_possible_compression_ratio < 0.0 or max_possible_compression_ratio > 1.0:28 return 999.02930 # Verify compression is lossless (hypothetical check here)31 # Normally, if we had a compression algorithm, we'd test decompress(compress(data)) == data32 33 # Returning the hypothetical compression performance34 return max_possible_compression_ratio