hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8418475e8b117a7899349c6df5fd5aeff3d447b2
| 996
|
py
|
Python
|
4 - observer pattern/api/event_system.py
|
lucascionis/betterpython
|
ab8db8c016ff0bccc443443740a26bccb70402f3
|
[
"MIT"
] | null | null | null |
4 - observer pattern/api/event_system.py
|
lucascionis/betterpython
|
ab8db8c016ff0bccc443443740a26bccb70402f3
|
[
"MIT"
] | null | null | null |
4 - observer pattern/api/event_system.py
|
lucascionis/betterpython
|
ab8db8c016ff0bccc443443740a26bccb70402f3
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
'''Comments
In the original solution only functions were used to
implement the event system (observer pattern).
In this implementation I wanted to write classes (to be as
nearest as possible to the pattern (?)).
It is surely better to use python first-citizen functions to create
the event handles (basically this is what I done, I created handle
classes to write different implementations of update method).
'''
class EventListener(ABC):
@abstractmethod
def update(self, data):
pass
class EventSystem():
def __init__(self):
self.subscribers = {}
def add_subscriber(self, event: str, subscriber: EventListener):
if event in self.subscribers:
self.subscribers[event].append(subscriber)
return
self.subscribers[event] = [subscriber,]
def trigger_event(self, event: str, data):
for subscriber in self.subscribers[event]:
subscriber.update(data)
| 25.538462
| 68
| 0.696787
| 125
| 996
| 5.504
| 0.528
| 0.109012
| 0.087209
| 0.087209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228916
| 996
| 38
| 69
| 26.210526
| 0.895833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.0625
| 0.0625
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 2
|
842b2f72a08093681d688bb2f92eb0afb6f06354
| 742
|
py
|
Python
|
quartic_sdk/core/entities/site.py
|
divyquartic/QuarticSDK
|
e3ce6387ed5f27845e0909878c831ae39badd8eb
|
[
"MIT"
] | 1
|
2021-03-26T12:39:44.000Z
|
2021-03-26T12:39:44.000Z
|
quartic_sdk/core/entities/site.py
|
divyquartic/QuarticSDK
|
e3ce6387ed5f27845e0909878c831ae39badd8eb
|
[
"MIT"
] | 95
|
2021-02-18T03:15:38.000Z
|
2022-03-25T05:39:12.000Z
|
quartic_sdk/core/entities/site.py
|
divyquartic/QuarticSDK
|
e3ce6387ed5f27845e0909878c831ae39badd8eb
|
[
"MIT"
] | 1
|
2021-09-03T12:46:18.000Z
|
2021-09-03T12:46:18.000Z
|
"""
The given file contains the class to refer to the Site entity
"""
from quartic_sdk.core.entities.base import Base
import quartic_sdk.utilities.constants as Constants
class Site(Base):
"""
The given class refers to the site entity which is created based upon the site response
returned by the API
"""
def __repr__(self):
"""
Override the method to return the site name
"""
return f"<{Constants.SITE_ENTITY}: {self.name}>"
def assets(self):
"""
Get the assets belongs to a site
"""
raise NotImplementedError
def edge_connectors(self):
"""
Get the edge_connectors belongs to a site
"""
raise NotImplementedError
| 23.935484
| 91
| 0.630728
| 94
| 742
| 4.882979
| 0.478723
| 0.061002
| 0.039216
| 0.065359
| 0.165577
| 0.165577
| 0
| 0
| 0
| 0
| 0
| 0
| 0.291105
| 742
| 30
| 92
| 24.733333
| 0.872624
| 0.38814
| 0
| 0.222222
| 0
| 0
| 0.106145
| 0.069832
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.222222
| 0
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 2
|
842c3f72d982dddd1077d864f70783e67cb8182b
| 525
|
py
|
Python
|
newapp/migrations/0003_auto_20190524_1511.py
|
HCDigitalScholarship/liason_lair
|
6035d100e3ea1216af2907a4cccd319a1cc4f8d8
|
[
"MIT"
] | null | null | null |
newapp/migrations/0003_auto_20190524_1511.py
|
HCDigitalScholarship/liason_lair
|
6035d100e3ea1216af2907a4cccd319a1cc4f8d8
|
[
"MIT"
] | null | null | null |
newapp/migrations/0003_auto_20190524_1511.py
|
HCDigitalScholarship/liason_lair
|
6035d100e3ea1216af2907a4cccd319a1cc4f8d8
|
[
"MIT"
] | 1
|
2019-08-03T01:30:30.000Z
|
2019-08-03T01:30:30.000Z
|
# Generated by Django 2.0.5 on 2019-05-24 15:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('newapp', '0002_auto_20190524_1507'),
]
operations = [
migrations.AlterField(
model_name='course',
name='additional_info',
field=models.TextField(),
),
migrations.AlterField(
model_name='course',
name='misc_links',
field=models.TextField(),
),
]
| 21.875
| 47
| 0.565714
| 51
| 525
| 5.686275
| 0.705882
| 0.137931
| 0.172414
| 0.2
| 0.268966
| 0.268966
| 0
| 0
| 0
| 0
| 0
| 0.086835
| 0.32
| 525
| 23
| 48
| 22.826087
| 0.72549
| 0.085714
| 0
| 0.470588
| 1
| 0
| 0.138075
| 0.048117
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
843bb9c05ba6309f2f5fa04bc4ff12d51bd9395e
| 430
|
py
|
Python
|
pages/homepage.py
|
eugenexxx/laptop_docker
|
362ea238296e64fdd5c49ac55185d65b05e718cc
|
[
"Apache-2.0"
] | null | null | null |
pages/homepage.py
|
eugenexxx/laptop_docker
|
362ea238296e64fdd5c49ac55185d65b05e718cc
|
[
"Apache-2.0"
] | null | null | null |
pages/homepage.py
|
eugenexxx/laptop_docker
|
362ea238296e64fdd5c49ac55185d65b05e718cc
|
[
"Apache-2.0"
] | null | null | null |
from webium import BasePage, Finds, Find
from selenium.webdriver.common.by import By
class Homepage(BasePage):
catalog_header = Find(by=By.CLASS_NAME, value="Header__BlockCatalogLink")
computers_label = Find(by=By.CSS_SELECTOR, value="a[href='/kompyutery/']")
laptops_accessories_label = Find(by=By.XPATH, value="//a[contains(.,'Ноутбуки и аксессуары')]")
laptops_label = Find(by=By.LINK_TEXT, value="Ноутбуки")
| 43
| 99
| 0.746512
| 59
| 430
| 5.271186
| 0.559322
| 0.07717
| 0.102894
| 0.125402
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111628
| 430
| 9
| 100
| 47.777778
| 0.814136
| 0
| 0
| 0
| 0
| 0
| 0.218605
| 0.162791
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.285714
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 2
|
843ca99856298b4d971576c36ef2ff0db2f48136
| 1,386
|
py
|
Python
|
pdata_app/migrations/0035_auto_20180221_1515.py
|
jonseddon/primavera-dmt
|
1239044e37f070b925a3d06db68351f285df780c
|
[
"BSD-3-Clause"
] | null | null | null |
pdata_app/migrations/0035_auto_20180221_1515.py
|
jonseddon/primavera-dmt
|
1239044e37f070b925a3d06db68351f285df780c
|
[
"BSD-3-Clause"
] | 49
|
2018-11-14T17:00:03.000Z
|
2021-12-20T11:04:22.000Z
|
pdata_app/migrations/0035_auto_20180221_1515.py
|
jonseddon/primavera-dmt
|
1239044e37f070b925a3d06db68351f285df780c
|
[
"BSD-3-Clause"
] | 2
|
2018-07-04T10:58:43.000Z
|
2018-09-29T14:55:08.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-02-21 15:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pdata_app', '0034_auto_20180221_1158'),
]
operations = [
migrations.AddField(
model_name='observationdataset',
name='cached_directories',
field=models.CharField(blank=True, max_length=200, null=True, verbose_name=b'Directory'),
),
migrations.AddField(
model_name='observationdataset',
name='cached_end_time',
field=models.DateTimeField(blank=True, null=True, verbose_name=b'End Time'),
),
migrations.AddField(
model_name='observationdataset',
name='cached_num_files',
field=models.IntegerField(blank=True, null=True, verbose_name=b'# Data Files'),
),
migrations.AddField(
model_name='observationdataset',
name='cached_start_time',
field=models.DateTimeField(blank=True, null=True, verbose_name=b'Start Time'),
),
migrations.AddField(
model_name='observationdataset',
name='cached_variables',
field=models.CharField(blank=True, max_length=500, null=True, verbose_name=b'Variables'),
),
]
| 33.804878
| 101
| 0.622655
| 144
| 1,386
| 5.791667
| 0.402778
| 0.107914
| 0.13789
| 0.161871
| 0.64988
| 0.601918
| 0.601918
| 0.278177
| 0.136691
| 0.136691
| 0
| 0.037182
| 0.262626
| 1,386
| 40
| 102
| 34.65
| 0.778865
| 0.047619
| 0
| 0.454545
| 1
| 0
| 0.191344
| 0.017464
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.060606
| 0
| 0.151515
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
8463c9fd49aa8b29fdde20c5d6a8fdfd2fb75f46
| 300
|
py
|
Python
|
src/gencoef/test.py
|
bwasti/sleef
|
4d260ae7f5d0e76a3c5424149deb838373e1894b
|
[
"BSL-1.0"
] | null | null | null |
src/gencoef/test.py
|
bwasti/sleef
|
4d260ae7f5d0e76a3c5424149deb838373e1894b
|
[
"BSL-1.0"
] | null | null | null |
src/gencoef/test.py
|
bwasti/sleef
|
4d260ae7f5d0e76a3c5424149deb838373e1894b
|
[
"BSL-1.0"
] | null | null | null |
import numpy as np
import math
p = np.poly1d([
+0.1429511242e-53,
+0.1561712123e-44,
-0.2259472298e-35,
-0.2669710222e-26,
+0.9784247973e-18,
+0.1655572013e-8,
+0.3991098106e+0,
])
def sigmoid(x):
return 1 / (1 + math.exp(-x))
for i in range(1000):
k = float(i) / 100
print(sigmoid(k), p(k))
| 15.789474
| 31
| 0.656667
| 53
| 300
| 3.716981
| 0.679245
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.386719
| 0.146667
| 300
| 18
| 32
| 16.666667
| 0.382813
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.125
| 0.0625
| 0.25
| 0.0625
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
ffbfef0bf96a36ae6a5da2b1bf0bdc5756f1e309
| 3,931
|
py
|
Python
|
poc.py
|
evinr/basis-scraper
|
cb20574aadc469c6d1527ddb5a3cc69df531cbfd
|
[
"MIT"
] | null | null | null |
poc.py
|
evinr/basis-scraper
|
cb20574aadc469c6d1527ddb5a3cc69df531cbfd
|
[
"MIT"
] | null | null | null |
poc.py
|
evinr/basis-scraper
|
cb20574aadc469c6d1527ddb5a3cc69df531cbfd
|
[
"MIT"
] | null | null | null |
import serial
def setup():
ser = serial.Serial('/dev/ttyUSB0', timeout=2)
ser.setRTS(True)
ser.setRTS(False)
if ser.isOpen():
ser.close()
ser.open()
ser.isOpen()
print "USB connection established"
def read():
rawString = ser.readline()
print rawString
return (str(rawString))
def write(stringVariable):
ser.write(stringVariable.encode())
def handshake():
write('AA 02 00 00 04 06 0A 00 AB')
#Expect
#01 60 AA 07 00 00 04 07 02 3D 02 03 02 51 00 AB
write('AA 02 00 00 05 06 0B 00 AB')
#Expect
#01 60 AA 0B 00 00 05 07 02 1A 0D A0 66 00 00 00 00 3B 01 AB
write('AA 02 00 00 0A 06 10 00 AB')
#Expect
#01 60 AA 0F 00 00 0A 07 02 30 30 30 34 33 65 30 32 65 64 64 65 63 03 AB
write('AA 02 00 00 09 06 0F 00 AB')
#This is assumed to be the manifest of data, ie what is currently contained on the device
#When no data is present, ie the watch has just been sitting there. Expect
#01 60 AA 05 00 00 09 07 02 1C 0B 39 00 AB
#TODO: Determine what this string is and how it is used
#this is based on quick and constant syncs, verify as normal behavior
write('AA 02 00 00 07 06 0D 00 AB')
#Same A
#Assumed to be tied to the 'firmware update', as when that gets pushed the contents of this change in the same spot.
# Three char sets change on these over the course of the contant syncs
# Lots of padding on this one
#TODO: Determine what this string is and how it is used
write('AA 23 00 00 05 04 00 52 BC 52 B9 3C 09 12 1B 64 12 CD 9B FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF 5E 18 AB')
#TODO: Determine if this string is consistant
#Expect
#01 60 AA 03 00 00 05 05 01 0B 00 AB
write('AA 02 00 00 06 04 0A 00 AB')
#Expect
#01 60 AA 03 00 00 06 05 01 0C 00 AB
write('AA 02 00 00 07 06 0D 00 AB')
#Same A
write('AA 08 00 00 04 04 1F 01 AC 2A 00 03 03 D8 00 AB')
#expect
#01 60 AA 03 00 00 04 05 01 0A 00 AB
# Current time gets sent here
#dynamic
# TODO: Determine how to send specific date times
write('AA 08 00 00 00 04 45 9B 05 09 5C FE 4C 02 AB') #201510181406
#expect
#01 60 AA 03 00 00 00 05 01 06 00 AB
write('AA 07 00 00 0C 04 00 10 27 00 00 47 00 AB')
#expect
#01 60 AA 03 00 00 0C 05 01 12 00 AB
write('AA 02 00 00 10 04 14 00 AB')
#expect
#01 60 AA 03 00 00 10 05 01 16 00 AB
write('AA 02 00 00 01 06 07 00 AB')
#Expect
#01 60 AA 07 00 00 01 07 02 7E 0B 00 00 93 00 AB
#01 60 AA 07 00 00 01 07 02 0A 00 00 00 14 00 AB
#01 60 AA 07 00 00 01 07 02 14 00 00 00 1E 00 AB
#01 60 AA 07 00 00 01 07 02 0A 00 00 00 14 00 AB
#01 60 AA 07 00 00 01 07 02 0A 00 00 00 14 00 AB
#01 60 AA 07 00 00 01 07 02 0A 00 00 00 14 00 AB
#01 60 AA 07 00 00 01 07 02 0A 00 00 00 14 00 AB
write('AA 02 00 00 02 06 08 00 AB')
#expect
#01 60 AA 05 00 00 02 07 02 01 00 0C 00 AB
write('AA 04 00 00 03 06 00 00 09 00 AB')
#expect
#real data here, with what appears to be aggregates in the header
write('AA 02 00 00 01 04 05 00 AB')
#expect
#01 60 AA 03 00 00 01 05 01 07 00 AB
write('')
def chilling():
isChilling = read()
if isChilling == '01 60 AA 07 00 00 00 03 01 3D 02 06 00 49 00 AB':
print "device is ready for data transfer"
def deletingData():
write('AA 02 00 00 08 06 0E 00 AB')
print "are we done transfering data?"
isDeletingData = read()
if isDeletingData == '01 60 AA 04 00 00 08 07 02 01 12 00 AB':
print "device is still deleting data from memory"
elif isDeletingData == '01 60 AA 04 00 00 08 07 02 00 11 00 AB':
print "device is done deleting data from memory"
else:
print "something unexpected happened"
#at this point steady chilling is what happens every so many seconds
#TODO: define the gathering of all of the possible data sets being extracted
#Biometrics
# Heart Rate
# STEPS
# CALORIES
# SKIN TEMP
# PERSPIRATION
#Activity
# Walking
# Running
# Biking
#Sleep
# REM
# Mind Refresh
# Light
# Deep
# Body Refresh
# Interruptions
# Toss & Turn
| 27.110345
| 135
| 0.675401
| 852
| 3,931
| 3.116197
| 0.266432
| 0.090395
| 0.049718
| 0.051224
| 0.363842
| 0.319397
| 0.283616
| 0.222976
| 0.222976
| 0.162712
| 0
| 0.285616
| 0.260748
| 3,931
| 145
| 136
| 27.110345
| 0.628011
| 0.511066
| 0
| 0.041667
| 0
| 0.020833
| 0.503504
| 0
| 0
| 0
| 0
| 0.006897
| 0
| 0
| null | null | 0
| 0.020833
| null | null | 0.145833
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
ffcbaba69ba29dbe70293f1d332c038a6aaf91b9
| 1,373
|
py
|
Python
|
datastore/__init__.py
|
Swixx/py-datastore
|
dfa1f9dcc3cc3beac3c3e79d085cb6e89da97a1c
|
[
"MIT"
] | 6
|
2019-08-04T04:11:36.000Z
|
2020-02-20T17:10:26.000Z
|
datastore/__init__.py
|
Swixx/py-datastore
|
dfa1f9dcc3cc3beac3c3e79d085cb6e89da97a1c
|
[
"MIT"
] | 23
|
2019-09-17T11:35:06.000Z
|
2020-04-07T16:18:15.000Z
|
datastore/__init__.py
|
Swixx/py-datastore
|
dfa1f9dcc3cc3beac3c3e79d085cb6e89da97a1c
|
[
"MIT"
] | 6
|
2019-08-04T02:02:25.000Z
|
2020-03-01T15:43:41.000Z
|
"""
Datastore is a generic layer of abstraction for data store and database access.
It is a **simple** API with the aim to enable application development in a
datastore-agnostic way, allowing datastores to be swapped seamlessly without
changing application code. Thus, one can leverage different datastores with
different strengths without committing the application to one datastore
throughout its lifetime.
"""
__version__ = "0.3.6"
__author__ = "Juan Batiz-Benet, Alexander Schlarb"
__email__ = "[email protected], [email protected]"
__all__ = (
"Key", "Namespace",
"BinaryNullDatastore", "BinaryDictDatastore",
"ObjectNullDatastore", "ObjectDictDatastore",
"Query", "Cursor",
"SerializerAdapter",
"abc", "typing", "util"
)
# import core.key
from .core.key import Key
from .core.key import Namespace
# import core.binarystore, core.objectstore
from .core.binarystore import NullDatastore as BinaryNullDatastore
from .core.binarystore import DictDatastore as BinaryDictDatastore
from .core.objectstore import NullDatastore as ObjectNullDatastore
from .core.objectstore import DictDatastore as ObjectDictDatastore
# import core.query
from .core.query import Query
from .core.query import Cursor
# import core.serialize
from .core.serialize import SerializerAdapter
### Exposed submodules ###
from . import abc
from . import typing
from . import util
| 29.212766
| 79
| 0.79024
| 167
| 1,373
| 6.401198
| 0.491018
| 0.067353
| 0.02058
| 0.026193
| 0.08232
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002513
| 0.130371
| 1,373
| 46
| 80
| 29.847826
| 0.892797
| 0.380189
| 0
| 0
| 0
| 0
| 0.252101
| 0.031212
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.521739
| 0
| 0.521739
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 2
|
ffd4de322115b22ae4e36e0be2d07a40743376b4
| 1,340
|
py
|
Python
|
users/models.py
|
connorgannaway/dockmate
|
040d44cac896aabc1488f3ed9d59b417e20719d8
|
[
"MIT"
] | null | null | null |
users/models.py
|
connorgannaway/dockmate
|
040d44cac896aabc1488f3ed9d59b417e20719d8
|
[
"MIT"
] | null | null | null |
users/models.py
|
connorgannaway/dockmate
|
040d44cac896aabc1488f3ed9d59b417e20719d8
|
[
"MIT"
] | null | null | null |
from os import name
from django.db import models
from django.contrib.auth.models import User
from PIL import Image
#Model classes are tables objects in a database.
#each variable is a column and its datatype.
#__str__ method defines the name of a object (row) in a database table
#profile model is meant to be used as an extension to the User model
#this is so users can have a profile picture and be connected to a company
class Company(models.Model):
name = models.CharField(max_length=50, unique=True)
key = models.CharField(max_length=12, unique=True)
def __str__(self):
return self.name
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
picture = models.ImageField(default='default.jpg', upload_to='profile_pics')
company = models.OneToOneField(Company, on_delete=models.CASCADE, null=True, blank=True)
def __str__(self):
return f"{self.user.username}'s Profile"
#overriding save method to resize image before saving.
#used for local file systems, does not work with AWS S3
""" def save(self, *args, **kwargs):
super().save(*args, **kwargs)
image = Image.open(self.picture.path)
if image.width > 300 or image.height > 300:
image.thumbnail((300, 300))
image.save(self.picture.path) """
| 37.222222
| 92
| 0.709701
| 199
| 1,340
| 4.688442
| 0.517588
| 0.021436
| 0.02358
| 0.051447
| 0.042872
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015755
| 0.194776
| 1,340
| 35
| 93
| 38.285714
| 0.848934
| 0.302985
| 0
| 0.133333
| 0
| 0
| 0.079699
| 0.033083
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.266667
| 0.133333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 2
|
081c3a4e5b548789411fc11be988031444e552dd
| 233
|
py
|
Python
|
dialogue/tensorflow/task/common/common.py
|
ishine/nlp-dialogue
|
d47c1438cb5c45c2c2aebfb82fea92bef4c3d65c
|
[
"Apache-2.0"
] | 478
|
2020-10-28T01:30:30.000Z
|
2022-03-30T06:34:07.000Z
|
paper-code/tensorflow_src/models/task/common/common.py
|
HengYongChao/nlp-paper
|
fcf985e3c9bfd6944d07c4c36afbaee3384d040d
|
[
"Apache-2.0"
] | 1
|
2021-08-29T11:55:09.000Z
|
2021-11-04T09:25:19.000Z
|
paper-code/tensorflow_src/models/task/common/common.py
|
HengYongChao/nlp-paper
|
fcf985e3c9bfd6944d07c4c36afbaee3384d040d
|
[
"Apache-2.0"
] | 89
|
2021-01-05T06:11:55.000Z
|
2022-03-24T12:51:57.000Z
|
from optparse import OptionParser
class CmdParser(OptionParser):
def error(self, msg):
print('Error!提示信息如下:')
self.print_help()
self.exit(0)
def exit(self, status=0, msg=None):
exit(status)
| 19.416667
| 39
| 0.622318
| 29
| 233
| 4.965517
| 0.586207
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011561
| 0.257511
| 233
| 11
| 40
| 21.181818
| 0.820809
| 0
| 0
| 0
| 0
| 0
| 0.055794
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.125
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
f248957a375715c7681a4295ca66a47a10ee7ea3
| 6,891
|
py
|
Python
|
tempest/tests/common/test_service_clients.py
|
xavpaice/tempest
|
958bd694df27511e0346d799876fe49331b8145c
|
[
"Apache-2.0"
] | null | null | null |
tempest/tests/common/test_service_clients.py
|
xavpaice/tempest
|
958bd694df27511e0346d799876fe49331b8145c
|
[
"Apache-2.0"
] | null | null | null |
tempest/tests/common/test_service_clients.py
|
xavpaice/tempest
|
958bd694df27511e0346d799876fe49331b8145c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import random
import six
from tempest.services.baremetal.v1.json import baremetal_client
from tempest.services.data_processing.v1_1 import data_processing_client
from tempest.services.database.json import flavors_client as db_flavor_client
from tempest.services.database.json import versions_client as db_version_client
from tempest.services.identity.v2.json import identity_client as \
identity_v2_identity_client
from tempest.services.identity.v3.json import credentials_client
from tempest.services.identity.v3.json import endpoints_client
from tempest.services.identity.v3.json import identity_client as \
identity_v3_identity_client
from tempest.services.identity.v3.json import policies_client
from tempest.services.identity.v3.json import regions_client
from tempest.services.identity.v3.json import services_client
from tempest.services.image.v1.json import images_client
from tempest.services.image.v2.json import images_client as images_v2_client
from tempest.services.messaging.json import messaging_client
from tempest.services.network.json import network_client
from tempest.services.object_storage import account_client
from tempest.services.object_storage import container_client
from tempest.services.object_storage import object_client
from tempest.services.orchestration.json import orchestration_client
from tempest.services.telemetry.json import alarming_client
from tempest.services.telemetry.json import telemetry_client
from tempest.services.volume.v1.json.admin import hosts_client \
as volume_hosts_client
from tempest.services.volume.v1.json.admin import quotas_client \
as volume_quotas_client
from tempest.services.volume.v1.json.admin import services_client \
as volume_services_client
from tempest.services.volume.v1.json.admin import types_client \
as volume_types_client
from tempest.services.volume.v1.json import availability_zone_client \
as volume_az_client
from tempest.services.volume.v1.json import backups_client
from tempest.services.volume.v1.json import extensions_client \
as volume_extensions_client
from tempest.services.volume.v1.json import qos_client
from tempest.services.volume.v1.json import snapshots_client
from tempest.services.volume.v1.json import volumes_client
from tempest.services.volume.v2.json.admin import hosts_client \
as volume_v2_hosts_client
from tempest.services.volume.v2.json.admin import quotas_client \
as volume_v2_quotas_client
from tempest.services.volume.v2.json.admin import services_client \
as volume_v2_services_client
from tempest.services.volume.v2.json.admin import types_client \
as volume_v2_types_client
from tempest.services.volume.v2.json import availability_zone_client \
as volume_v2_az_client
from tempest.services.volume.v2.json import backups_client \
as volume_v2_backups_client
from tempest.services.volume.v2.json import extensions_client \
as volume_v2_extensions_client
from tempest.services.volume.v2.json import qos_client as volume_v2_qos_client
from tempest.services.volume.v2.json import snapshots_client \
as volume_v2_snapshots_client
from tempest.services.volume.v2.json import volumes_client as \
volume_v2_volumes_client
from tempest.tests import base
class TestServiceClient(base.TestCase):
@mock.patch('tempest_lib.common.rest_client.RestClient.__init__')
def test_service_client_creations_with_specified_args(self, mock_init):
test_clients = [
baremetal_client.BaremetalClient,
data_processing_client.DataProcessingClient,
db_flavor_client.DatabaseFlavorsClient,
db_version_client.DatabaseVersionsClient,
messaging_client.MessagingClient,
network_client.NetworkClient,
account_client.AccountClient,
container_client.ContainerClient,
object_client.ObjectClient,
orchestration_client.OrchestrationClient,
telemetry_client.TelemetryClient,
alarming_client.AlarmingClient,
qos_client.QosSpecsClient,
volume_hosts_client.HostsClient,
volume_quotas_client.QuotasClient,
volume_services_client.ServicesClient,
volume_types_client.TypesClient,
volume_az_client.AvailabilityZoneClient,
backups_client.BackupsClient,
volume_extensions_client.ExtensionsClient,
snapshots_client.SnapshotsClient,
volumes_client.VolumesClient,
volume_v2_hosts_client.HostsClient,
volume_v2_quotas_client.QuotasClient,
volume_v2_services_client.ServicesClient,
volume_v2_types_client.TypesClient,
volume_v2_az_client.AvailabilityZoneClient,
volume_v2_backups_client.BackupsClient,
volume_v2_extensions_client.ExtensionsClient,
volume_v2_qos_client.QosSpecsClient,
volume_v2_snapshots_client.SnapshotsClient,
volume_v2_volumes_client.VolumesClient,
identity_v2_identity_client.IdentityClient,
credentials_client.CredentialsClient,
endpoints_client.EndPointClient,
identity_v3_identity_client.IdentityV3Client,
policies_client.PoliciesClient,
regions_client.RegionsClient,
services_client.ServicesClient,
images_client.ImagesClient,
images_v2_client.ImagesClientV2
]
for client in test_clients:
fake_string = six.text_type(random.randint(1, 0x7fffffff))
auth = 'auth' + fake_string
service = 'service' + fake_string
region = 'region' + fake_string
params = {
'endpoint_type': 'URL' + fake_string,
'build_interval': random.randint(1, 100),
'build_timeout': random.randint(1, 100),
'disable_ssl_certificate_validation':
True if random.randint(0, 1) else False,
'ca_certs': None,
'trace_requests': 'foo' + fake_string
}
client(auth, service, region, **params)
mock_init.assert_called_once_with(auth, service, region, **params)
mock_init.reset_mock()
| 47.524138
| 79
| 0.753882
| 837
| 6,891
| 5.951016
| 0.236559
| 0.092752
| 0.156394
| 0.200763
| 0.419795
| 0.399518
| 0.343706
| 0.238105
| 0.098374
| 0
| 0
| 0.014477
| 0.188071
| 6,891
| 144
| 80
| 47.854167
| 0.875782
| 0.087505
| 0
| 0
| 0
| 0
| 0.026937
| 0.013389
| 0
| 0
| 0.001594
| 0
| 0.008
| 1
| 0.008
| false
| 0
| 0.36
| 0
| 0.376
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 2
|
f26261a417ed0c023adbf8199f1af50c5d35454f
| 351
|
py
|
Python
|
source/exceptions.py
|
corradiniste/Paneka-discord-bot
|
43672b2720c88635266e0962f446bd36bd7ced7d
|
[
"MIT"
] | 12
|
2020-08-03T05:47:20.000Z
|
2021-10-06T06:20:19.000Z
|
source/exceptions.py
|
Shivanirudh/Paneka-discord-bot
|
0ec257e92e40baf23233711c9cf9889e8c56ab53
|
[
"MIT"
] | 5
|
2020-10-03T08:27:35.000Z
|
2021-06-02T04:45:57.000Z
|
source/exceptions.py
|
Shivanirudh/Paneka-discord-bot
|
0ec257e92e40baf23233711c9cf9889e8c56ab53
|
[
"MIT"
] | 6
|
2020-08-06T10:41:49.000Z
|
2022-02-14T17:26:07.000Z
|
class InvalidLimitException(Exception):
"""
Invalid number of matches requested
"""
pass
class InvalidLeagueCodeException(Exception):
"""
The League code requested is either invalid or not supported
"""
pass
class InvalidTeamCodeException(Exception):
"""
The Team Code requested is invalid
"""
pass
| 17.55
| 64
| 0.672365
| 33
| 351
| 7.151515
| 0.606061
| 0.076271
| 0.127119
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.250712
| 351
| 19
| 65
| 18.473684
| 0.897338
| 0.373219
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 2
|
f263d10e4b0315d66a52d4a47d9ce8cba72ce9a2
| 336
|
py
|
Python
|
Task1F.py
|
momopmoXZ/1a-flood-coding
|
13d2f6387e136f046b07a045eadfe654e9c2c27f
|
[
"MIT"
] | null | null | null |
Task1F.py
|
momopmoXZ/1a-flood-coding
|
13d2f6387e136f046b07a045eadfe654e9c2c27f
|
[
"MIT"
] | null | null | null |
Task1F.py
|
momopmoXZ/1a-flood-coding
|
13d2f6387e136f046b07a045eadfe654e9c2c27f
|
[
"MIT"
] | 1
|
2022-02-07T17:04:41.000Z
|
2022-02-07T17:04:41.000Z
|
from floodsystem.stationdata import build_station_list
from floodsystem.station import inconsistent_typical_range_stations
stations = build_station_list()
incon_station=inconsistent_typical_range_stations(stations)
incon_names=[]
for station in incon_station:
incon_names.append(station.name)
incon_names.sort()
print (incon_names)
| 33.6
| 67
| 0.863095
| 44
| 336
| 6.227273
| 0.431818
| 0.145985
| 0.116788
| 0.233577
| 0.291971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074405
| 336
| 9
| 68
| 37.333333
| 0.881029
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.222222
| 0
| 0.222222
| 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
f2661fcc769c20d3c2e052ada4cb40f950039d1a
| 675
|
py
|
Python
|
tallerestructurasselectivas/14.py
|
juandab07/Algoritmos-y-programacion
|
f3c10f5c4620b15432ecfe2b9f5831437a49ace9
|
[
"MIT"
] | null | null | null |
tallerestructurasselectivas/14.py
|
juandab07/Algoritmos-y-programacion
|
f3c10f5c4620b15432ecfe2b9f5831437a49ace9
|
[
"MIT"
] | null | null | null |
tallerestructurasselectivas/14.py
|
juandab07/Algoritmos-y-programacion
|
f3c10f5c4620b15432ecfe2b9f5831437a49ace9
|
[
"MIT"
] | null | null | null |
print('ingrese el monto a pagar en aseo urbano')
aseo=float(input())
print('ingrese el valor de lectura del mes anterior')
ant=float(input())
print('ingrese el valor de lectura del mes actual')
act=float(input())
cons=act-ant
if 0<cons<=100:
pago=cons*4600
print('debera pagar $',pago,'en luz electrica y',aseo,'en aseo urbano')
if 101<cons<=300:
pago=cons*80000
print('debera pagar $',pago,'en luz electrica y',aseo,'en aseo urbano')
if 301<cons<=500:
pago=cons*100000
print('debera pagar $',pago,'en luz electrica y',aseo,'en aseo urbano')
if cons>500:
pago=cons*120000
print('debera pagar $',pago,'en luz electrica y',aseo,'en aseo urbano')
| 35.526316
| 75
| 0.694815
| 114
| 675
| 4.114035
| 0.324561
| 0.063966
| 0.127932
| 0.170576
| 0.635394
| 0.635394
| 0.635394
| 0.635394
| 0.635394
| 0.635394
| 0
| 0.06993
| 0.152593
| 675
| 19
| 76
| 35.526316
| 0.75
| 0
| 0
| 0.210526
| 0
| 0
| 0.457101
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.368421
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
f26a8afeac7319e72d66512791f4976ac936a01f
| 1,275
|
py
|
Python
|
examples/1-marshmallow/server/resources/user/schema.py
|
FlyingBird95/openapi_generator
|
df4649b9723eb89fa370b02220356b7596794069
|
[
"MIT"
] | 3
|
2022-01-10T12:43:36.000Z
|
2022-01-13T18:08:15.000Z
|
examples/1-marshmallow/server/resources/user/schema.py
|
FlyingBird95/openapi_generator
|
df4649b9723eb89fa370b02220356b7596794069
|
[
"MIT"
] | 6
|
2022-02-06T19:00:05.000Z
|
2022-03-22T14:22:21.000Z
|
examples/1-marshmallow/server/resources/user/schema.py
|
FlyingBird95/openapi-builder
|
df4649b9723eb89fa370b02220356b7596794069
|
[
"MIT"
] | 2
|
2021-12-17T17:26:06.000Z
|
2021-12-17T17:39:00.000Z
|
from marshmallow import Schema, fields
class RegisterUser(Schema):
"""Deserialize register user schema."""
email = fields.Email(required=True)
"""Email."""
first_name = fields.String(required=True)
"""First name."""
last_name = fields.String(required=True)
"""Last name."""
password = fields.String(required=True)
"""Password."""
class UpdateUser(Schema):
"""Deserialize update user schema."""
first_name = fields.String(required=False)
"""First name."""
last_name = fields.String(required=False)
"""Last name."""
password = fields.String(required=False)
"""Password."""
class UserSchema(Schema):
"""User response schema."""
id = fields.Integer()
"""ID."""
email = fields.Email()
"""Email."""
first_name = fields.String()
"""First name."""
last_name = fields.String()
"""Last name."""
register_date = fields.DateTime()
"""Register date."""
class ErrorSchema(Schema):
"""Error response schema."""
message = fields.String()
"""The error message."""
class FunctionSchema(Schema):
"""Test schema for showing how a custom field can be serialized."""
@fields.Function
def list_of_strings(self):
return ["abc", "def"]
| 19.615385
| 71
| 0.620392
| 137
| 1,275
| 5.708029
| 0.364964
| 0.138107
| 0.122762
| 0.122762
| 0.360614
| 0.223785
| 0.094629
| 0
| 0
| 0
| 0
| 0
| 0.212549
| 1,275
| 64
| 72
| 19.921875
| 0.778884
| 0.134902
| 0
| 0
| 0
| 0
| 0.006889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0.090909
| 0.045455
| 0.045455
| 0.954545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 2
|
f27a87d9305d94ef4ecc93fe8c501738b9c6465e
| 582
|
py
|
Python
|
recipes/Python/474122_neat/recipe-474122.py
|
tdiprima/code
|
61a74f5f93da087d27c70b2efe779ac6bd2a3b4f
|
[
"MIT"
] | 2,023
|
2017-07-29T09:34:46.000Z
|
2022-03-24T08:00:45.000Z
|
recipes/Python/474122_neat/recipe-474122.py
|
unhacker/code
|
73b09edc1b9850c557a79296655f140ce5e853db
|
[
"MIT"
] | 32
|
2017-09-02T17:20:08.000Z
|
2022-02-11T17:49:37.000Z
|
recipes/Python/474122_neat/recipe-474122.py
|
unhacker/code
|
73b09edc1b9850c557a79296655f140ce5e853db
|
[
"MIT"
] | 780
|
2017-07-28T19:23:28.000Z
|
2022-03-25T20:39:41.000Z
|
# nice and clean closure notation
def get_counter_neat():
def f():
f.x += 1
return f.x
f.x = 0
return f
# traditional, not_so_neat closure notation
def get_counter_traditional():
x = [0]
def f():
x[0] += 1
return x[0]
return f
#### EXAMPLE ###########################################################
cnt_a = get_counter_neat()
cnt_b = get_counter_neat()
print cnt_a() # >>> 1
print cnt_a() # >>> 2
print cnt_a() # >>> 3
print cnt_b() # >>> 1
print cnt_a() # >>> 4
print cnt_b() # >>> 2
print cnt_b() # >>> 3
| 20.068966
| 72
| 0.487973
| 83
| 582
| 3.192771
| 0.301205
| 0.211321
| 0.135849
| 0.158491
| 0.211321
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030588
| 0.269759
| 582
| 28
| 73
| 20.785714
| 0.592941
| 0.213058
| 0
| 0.52381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.333333
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
f2903e37d62a64c2678663ac58e60ba0efca0df6
| 206
|
py
|
Python
|
setup.py
|
hemanths933/Segmentation_Unet
|
701585b31df7e4159e2fdbe56aaca99d9a4a8ea9
|
[
"MIT"
] | null | null | null |
setup.py
|
hemanths933/Segmentation_Unet
|
701585b31df7e4159e2fdbe56aaca99d9a4a8ea9
|
[
"MIT"
] | null | null | null |
setup.py
|
hemanths933/Segmentation_Unet
|
701585b31df7e4159e2fdbe56aaca99d9a4a8ea9
|
[
"MIT"
] | null | null | null |
from setuptools import setup
setup(
name='Unet',
version='',
packages=['models'],
url='',
license='',
author='hemanth sharma',
author_email='',
description=''
)
| 15.846154
| 29
| 0.538835
| 18
| 206
| 6.111111
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.291262
| 206
| 12
| 30
| 17.166667
| 0.753425
| 0
| 0
| 0
| 0
| 0
| 0.123711
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.090909
| 0
| 0.090909
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
f29ee11e7e85111e249a8c2b4d2fb8ce2bd1370b
| 1,230
|
py
|
Python
|
mopidy_monobox/__init__.py
|
oxullo/mopidy-monobox
|
3cf9077e49afb0f0171f990cc4205cc348dcda1d
|
[
"Apache-2.0"
] | null | null | null |
mopidy_monobox/__init__.py
|
oxullo/mopidy-monobox
|
3cf9077e49afb0f0171f990cc4205cc348dcda1d
|
[
"Apache-2.0"
] | null | null | null |
mopidy_monobox/__init__.py
|
oxullo/mopidy-monobox
|
3cf9077e49afb0f0171f990cc4205cc348dcda1d
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import os
# TODO: Remove entirely if you don't register GStreamer elements below
import pygst
pygst.require('0.10')
import gst
import gobject
from mopidy import config, ext
__version__ = '0.1.0'
# TODO: If you need to log, use loggers named after the current Python module
logger = logging.getLogger(__name__)
class Extension(ext.Extension):
dist_name = 'Mopidy-Monobox'
ext_name = 'monobox'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(Extension, self).get_config_schema()
schema['serial_port'] = config.String()
schema['serial_bps'] = config.Integer()
schema['shuffle'] = config.Boolean()
schema['only_playlists'] = config.List(optional=True)
schema['cue_feature'] = config.Boolean()
schema['pulses_trigger'] = config.Integer()
return schema
def setup(self, registry):
from .frontend import MonoboxFrontend
registry.add('frontend', MonoboxFrontend)
| 26.170213
| 77
| 0.688618
| 155
| 1,230
| 5.232258
| 0.56129
| 0.01233
| 0.036991
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007114
| 0.2
| 1,230
| 46
| 78
| 26.73913
| 0.817073
| 0.152033
| 0
| 0
| 0
| 0
| 0.108758
| 0
| 0
| 0
| 0
| 0.021739
| 0
| 1
| 0.103448
| false
| 0
| 0.275862
| 0
| 0.586207
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 2
|
f2a16388d4271df1ce952f8cf5640d703d0a37c8
| 66
|
py
|
Python
|
nyoka/PMML44/doc/source/scripts/metadata.py
|
maxibor/nyoka
|
19f480eee608035aa5fba368c96d4143bc2f5710
|
[
"Apache-2.0"
] | 71
|
2020-08-24T07:59:56.000Z
|
2022-03-21T08:36:35.000Z
|
nyoka/PMML44/doc/source/scripts/metadata.py
|
maxibor/nyoka
|
19f480eee608035aa5fba368c96d4143bc2f5710
|
[
"Apache-2.0"
] | 16
|
2020-09-02T10:27:36.000Z
|
2022-03-31T05:37:12.000Z
|
nyoka/PMML44/doc/source/scripts/metadata.py
|
nimeshgit/nyoka
|
43bf049825922213eeb3e6a8f39864f9b75d01d5
|
[
"Apache-2.0"
] | 16
|
2020-09-17T15:01:33.000Z
|
2022-03-28T03:13:25.000Z
|
__version__ = '3.1.0rc1'
__license__ = "Apache Software License"
| 16.5
| 39
| 0.742424
| 8
| 66
| 5.125
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 0.136364
| 66
| 3
| 40
| 22
| 0.649123
| 0
| 0
| 0
| 0
| 0
| 0.476923
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
f2ac969d340070fc7df625b368680b8b1a6e1f30
| 323
|
py
|
Python
|
src/ralph/assets/filters.py
|
DoNnMyTh/ralph
|
97b91639fa68965ad3fd9d0d2652a6545a2a5b72
|
[
"Apache-2.0"
] | 1,668
|
2015-01-01T12:51:20.000Z
|
2022-03-29T09:05:35.000Z
|
src/ralph/assets/filters.py
|
hq-git/ralph
|
e2448caf02d6e5abfd81da2cff92aefe0a534883
|
[
"Apache-2.0"
] | 2,314
|
2015-01-02T13:26:26.000Z
|
2022-03-29T04:06:03.000Z
|
src/ralph/assets/filters.py
|
hq-git/ralph
|
e2448caf02d6e5abfd81da2cff92aefe0a534883
|
[
"Apache-2.0"
] | 534
|
2015-01-05T12:40:28.000Z
|
2022-03-29T21:10:12.000Z
|
from ralph.admin.filters import DateListFilter
class BuyoutDateFilter(DateListFilter):
def queryset(self, request, queryset):
queryset = super().queryset(request, queryset)
if queryset is not None:
queryset = queryset.filter(model__category__show_buyout_date=True)
return queryset
| 32.3
| 78
| 0.724458
| 35
| 323
| 6.514286
| 0.714286
| 0.131579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204334
| 323
| 9
| 79
| 35.888889
| 0.88716
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 2
|
f2b2b39cb97742f076427952e2dfe5b302a0b56b
| 1,548
|
py
|
Python
|
webapp/vaga_remanescente/views.py
|
prefeiturasp/SME-VagasNaCreche-API
|
20ae8862375124c7459fe6ff2d2d33ed34d136fb
|
[
"0BSD"
] | null | null | null |
webapp/vaga_remanescente/views.py
|
prefeiturasp/SME-VagasNaCreche-API
|
20ae8862375124c7459fe6ff2d2d33ed34d136fb
|
[
"0BSD"
] | 9
|
2020-06-06T00:20:46.000Z
|
2022-02-10T10:57:35.000Z
|
webapp/vaga_remanescente/views.py
|
prefeiturasp/SME-VagasNaCreche-API
|
20ae8862375124c7459fe6ff2d2d33ed34d136fb
|
[
"0BSD"
] | 1
|
2020-09-17T14:46:24.000Z
|
2020-09-17T14:46:24.000Z
|
import pickle
import zlib
from django.core.cache import cache
from fila_da_creche.queries.dt_atualizacao import get_dt_atualizacao
from rest_framework.response import Response
from rest_framework.views import APIView
from vaga_remanescente.queries.distrito import get_distritos
from vaga_remanescente.queries.dre import get_dre
from vaga_remanescente.queries.sub_prefeitura import get_sub_prefeituras
from vaga_remanescente.queries.vaga_por_escolas import get_vaga_por_escolas
class GetVagaByEscola(APIView):
def get(self, request, cd_serie):
resposta = {}
filtro = request.GET.get('filtro', '')
busca = request.GET.get('busca', '')
if cd_serie not in [1, 4, 27, 28]:
return Response('Série invalida')
# Queries no Banco no DW
resposta['escolas'] = get_vaga_por_escolas(filtro, busca, cd_serie)
resposta['dt_atualizacao'] = get_dt_atualizacao()
return Response(resposta)
class GetVagasFilter(APIView):
def get(self, request):
cache_time = 3600
cached_item = cache.get('filtros_vaga')
if not cached_item:
response = {'dres': get_dre(),
'distritos': get_distritos(),
'sub-prefeituras': get_sub_prefeituras()}
cache.set('filtros_vaga', zlib.compress(pickle.dumps(response)), cache_time)
return Response(response)
print('Com cache')
return Response(pickle.loads(zlib.decompress(cached_item)))
| 36
| 89
| 0.672481
| 185
| 1,548
| 5.405405
| 0.356757
| 0.045
| 0.08
| 0.108
| 0.048
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008489
| 0.239018
| 1,548
| 42
| 90
| 36.857143
| 0.840407
| 0.014212
| 0
| 0
| 0
| 0
| 0.0722
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.3125
| 0
| 0.5625
| 0.03125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 2
|
f2b9b881e8f73f67cedd4bd5a979546da0d9dcab
| 988
|
py
|
Python
|
textembedding/__init__.py
|
Hanscal/textembedding
|
0076a7a67e1c0e0b3ebc4bbbfa9dcdcfbf16c4c7
|
[
"MIT"
] | 1
|
2021-05-26T09:42:37.000Z
|
2021-05-26T09:42:37.000Z
|
textembedding/__init__.py
|
Hanscal/textembedding
|
0076a7a67e1c0e0b3ebc4bbbfa9dcdcfbf16c4c7
|
[
"MIT"
] | null | null | null |
textembedding/__init__.py
|
Hanscal/textembedding
|
0076a7a67e1c0e0b3ebc4bbbfa9dcdcfbf16c4c7
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2021/2/26 12:07 下午
@Author : hcai
@Email : [email protected]
"""
import textembedding.get_embedding
import textembedding.load_model
name = "textbedding"
# 加载wv model
def load_word2vect(filepath):
model = textembedding.load_model.load_word2vect(filepath)
return model
# 获取字向量
def get_word_embedding(model,word,min=1,max=3):
word_vector = textembedding.get_embedding.get_word_embedding(model, word, min, max)
return word_vector
# 获取句子向量
def get_sentence_embedding(model, sentence, add_pos_weight=['n','nr','ng','ns','nt','nz'],stop_words_path=None):
sentence_vector = textembedding.get_embedding.get_sentence_embedding(model, sentence, add_pos_weight, stop_words_path)
return sentence_vector
# 获取句子相似度
def get_vector_similarity(query_vec,vec_list,metric_type='cos'):
vector_similarity = textembedding.get_embedding.get_similarity(query_vec,vec_list,metric_type)
return vector_similarity
| 29.058824
| 122
| 0.765182
| 139
| 988
| 5.158273
| 0.460432
| 0.089261
| 0.13947
| 0.117155
| 0.387727
| 0.301255
| 0.223152
| 0.125523
| 0
| 0
| 0
| 0.01837
| 0.118421
| 988
| 34
| 123
| 29.058824
| 0.804822
| 0.15081
| 0
| 0
| 0
| 0
| 0.03023
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.266667
| false
| 0
| 0.133333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 2
|
f2c28b1aa80b1c46d32c2a27e47aa2e4dc3f68c8
| 612
|
py
|
Python
|
ipfinder.py
|
robertas64/realGrowIp
|
bc1f1f4cf30eaa4091a6f81907a39eb8d3b66990
|
[
"MIT"
] | 1
|
2022-03-09T23:21:18.000Z
|
2022-03-09T23:21:18.000Z
|
ipfinder.py
|
njartemis/realGrowIp
|
bc1f1f4cf30eaa4091a6f81907a39eb8d3b66990
|
[
"MIT"
] | null | null | null |
ipfinder.py
|
njartemis/realGrowIp
|
bc1f1f4cf30eaa4091a6f81907a39eb8d3b66990
|
[
"MIT"
] | 1
|
2021-04-16T16:11:24.000Z
|
2021-04-16T16:11:24.000Z
|
# ___ ___ ___ _ _
# |_ _| _ \___| __(_)_ _ __| |___ _ _
# | || _/___| _|| | ' \/ _` / -_) '_|
# |___|_| |_| |_|_||_\__,_\___|_|
# Made by Robertas64
#Importing the module
import os
from time import *
banner = """
___ ___ ___ _ _
|_ _| _ \___| __(_)_ _ __| |___ _ _
| || _/___| _|| | ' \/ _` / -_) '_|
|___|_| |_| |_|_||_\__,_\___|_|
Find GrowtopiaServer Real IP
Author : Robertas64
Make sure you're connected
To GrowtopiaServer hosts
"""
#Main
print(banner)
os.system("ping growtopia1.com")
| 22.666667
| 40
| 0.47549
| 34
| 612
| 5.5
| 0.852941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01269
| 0.356209
| 612
| 27
| 41
| 22.666667
| 0.461929
| 0.315359
| 0
| 0
| 0
| 0.071429
| 0.792683
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0.071429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
4b4b052054557003ef2b409b1b9f8cb5ed96012e
| 48,837
|
py
|
Python
|
tests/schematics_proto3_tests_pb2.py
|
mlga/schematics-proto3
|
588fe5bc212e203688166638a1c52dfeda931403
|
[
"MIT"
] | null | null | null |
tests/schematics_proto3_tests_pb2.py
|
mlga/schematics-proto3
|
588fe5bc212e203688166638a1c52dfeda931403
|
[
"MIT"
] | 11
|
2020-04-09T13:33:54.000Z
|
2020-08-19T17:38:26.000Z
|
tests/schematics_proto3_tests_pb2.py
|
mlga/schematics-proto3
|
588fe5bc212e203688166638a1c52dfeda931403
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tests/schematics_proto3_tests.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='tests/schematics_proto3_tests.proto',
package='schematics_proto3.tests',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n#tests/schematics_proto3_tests.proto\x12\x17schematics_proto3.tests\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"e\n\x06Nested\x12\x34\n\x05inner\x18\x01 \x01(\x0b\x32%.schematics_proto3.tests.Nested.Inner\x12\r\n\x05other\x18\x02 \x01(\t\x1a\x16\n\x05Inner\x12\r\n\x05value\x18\x01 \x01(\t\">\n\rWrappedDouble\x12-\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\"<\n\x0cWrappedFloat\x12,\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\"<\n\x0cWrappedInt64\x12,\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\">\n\rWrappedUInt64\x12-\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\"<\n\x0cWrappedInt32\x12,\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\">\n\rWrappedUInt32\x12-\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\":\n\x0bWrappedBool\x12+\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\">\n\rWrappedString\x12-\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"<\n\x0cWrappedBytes\x12,\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.BytesValue\"6\n\tTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\">\n\x11RepeatedTimestamp\x12)\n\x05value\x18\x01 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"w\n\x0eOneOfTimestamp\x12.\n\x06value1\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValueH\x00\x12,\n\x06value2\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x07\n\x05inner\"\x17\n\x06\x44ouble\x12\r\n\x05value\x18\x01 \x01(\x01\"\x16\n\x05\x46loat\x12\r\n\x05value\x18\x01 \x01(\x02\"\x16\n\x05Int64\x12\r\n\x05value\x18\x01 \x01(\x03\"\x17\n\x06UInt64\x12\r\n\x05value\x18\x01 \x01(\x04\"\x16\n\x05Int32\x12\r\n\x05value\x18\x01 \x01(\x05\"\x17\n\x06UInt32\x12\r\n\x05value\x18\x01 \x01(\r\"\x15\n\x04\x42ool\x12\r\n\x05value\x18\x01 \x01(\x08\"\x17\n\x06String\x12\r\n\x05value\x18\x01 \x01(\t\"\x16\n\x05\x42ytes\x12\r\n\x05value\x18\x01 \x01(\x0c\"\"\n\x11RepeatedPrimitive\x12\r\n\x05value\x18\x01 \x03(\t\"f\n\x0eRepeatedNested\x12<\n\x05inner\x18\x01 \x03(\x0b\x32-.schematics_proto3.tests.RepeatedNested.Inner\x1a\x16\n\x05Inner\x12\r\n\x05value\x18\x01 \x01(\t\"=\n\x0fRepeatedWrapped\x12*\n\x05value\x18\x01 \x03(\x0b\x32\x1b.google.protobuf.Int32Value\"=\n\x0eOneOfPrimitive\x12\x10\n\x06value1\x18\x01 \x01(\x04H\x00\x12\x10\n\x06value2\x18\x02 \x01(\tH\x00\x42\x07\n\x05inner\"\x9c\x01\n\x0bOneOfNested\x12<\n\x06value1\x18\x01 \x01(\x0b\x32*.schematics_proto3.tests.OneOfNested.InnerH\x00\x12.\n\x06value2\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValueH\x00\x1a\x16\n\x05Inner\x12\r\n\x05value\x18\x01 \x01(\tB\x07\n\x05inner\":\n\nSimpleEnum\x12,\n\x05value\x18\x01 \x01(\x0e\x32\x1d.schematics_proto3.tests.Enum\"<\n\x0cRepeatedEnum\x12,\n\x05value\x18\x01 \x03(\x0e\x32\x1d.schematics_proto3.tests.Enum\"u\n\tOneOfEnum\x12.\n\x06value1\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValueH\x00\x12/\n\x06value2\x18\x02 \x01(\x0e\x32\x1d.schematics_proto3.tests.EnumH\x00\x42\x07\n\x05inner**\n\x04\x45num\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x62\x06proto3')
,
dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,])
_ENUM = _descriptor.EnumDescriptor(
name='Enum',
full_name='schematics_proto3.tests.Enum',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FIRST', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SECOND', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1922,
serialized_end=1964,
)
_sym_db.RegisterEnumDescriptor(_ENUM)
Enum = enum_type_wrapper.EnumTypeWrapper(_ENUM)
UNKNOWN = 0
FIRST = 1
SECOND = 2
_NESTED_INNER = _descriptor.Descriptor(
name='Inner',
full_name='schematics_proto3.tests.Nested.Inner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Nested.Inner.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=208,
serialized_end=230,
)
_NESTED = _descriptor.Descriptor(
name='Nested',
full_name='schematics_proto3.tests.Nested',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='inner', full_name='schematics_proto3.tests.Nested.inner', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='other', full_name='schematics_proto3.tests.Nested.other', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_NESTED_INNER, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=129,
serialized_end=230,
)
_WRAPPEDDOUBLE = _descriptor.Descriptor(
name='WrappedDouble',
full_name='schematics_proto3.tests.WrappedDouble',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedDouble.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=232,
serialized_end=294,
)
_WRAPPEDFLOAT = _descriptor.Descriptor(
name='WrappedFloat',
full_name='schematics_proto3.tests.WrappedFloat',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedFloat.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=296,
serialized_end=356,
)
_WRAPPEDINT64 = _descriptor.Descriptor(
name='WrappedInt64',
full_name='schematics_proto3.tests.WrappedInt64',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedInt64.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=358,
serialized_end=418,
)
_WRAPPEDUINT64 = _descriptor.Descriptor(
name='WrappedUInt64',
full_name='schematics_proto3.tests.WrappedUInt64',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedUInt64.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=420,
serialized_end=482,
)
_WRAPPEDINT32 = _descriptor.Descriptor(
name='WrappedInt32',
full_name='schematics_proto3.tests.WrappedInt32',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedInt32.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=484,
serialized_end=544,
)
_WRAPPEDUINT32 = _descriptor.Descriptor(
name='WrappedUInt32',
full_name='schematics_proto3.tests.WrappedUInt32',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedUInt32.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=546,
serialized_end=608,
)
_WRAPPEDBOOL = _descriptor.Descriptor(
name='WrappedBool',
full_name='schematics_proto3.tests.WrappedBool',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedBool.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=610,
serialized_end=668,
)
_WRAPPEDSTRING = _descriptor.Descriptor(
name='WrappedString',
full_name='schematics_proto3.tests.WrappedString',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedString.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=670,
serialized_end=732,
)
_WRAPPEDBYTES = _descriptor.Descriptor(
name='WrappedBytes',
full_name='schematics_proto3.tests.WrappedBytes',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedBytes.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=734,
serialized_end=794,
)
_TIMESTAMP = _descriptor.Descriptor(
name='Timestamp',
full_name='schematics_proto3.tests.Timestamp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Timestamp.value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=796,
serialized_end=850,
)
_REPEATEDTIMESTAMP = _descriptor.Descriptor(
name='RepeatedTimestamp',
full_name='schematics_proto3.tests.RepeatedTimestamp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.RepeatedTimestamp.value', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=852,
serialized_end=914,
)
_ONEOFTIMESTAMP = _descriptor.Descriptor(
name='OneOfTimestamp',
full_name='schematics_proto3.tests.OneOfTimestamp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value1', full_name='schematics_proto3.tests.OneOfTimestamp.value1', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value2', full_name='schematics_proto3.tests.OneOfTimestamp.value2', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='inner', full_name='schematics_proto3.tests.OneOfTimestamp.inner',
index=0, containing_type=None, fields=[]),
],
serialized_start=916,
serialized_end=1035,
)
_DOUBLE = _descriptor.Descriptor(
name='Double',
full_name='schematics_proto3.tests.Double',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Double.value', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1037,
serialized_end=1060,
)
_FLOAT = _descriptor.Descriptor(
name='Float',
full_name='schematics_proto3.tests.Float',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Float.value', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1062,
serialized_end=1084,
)
_INT64 = _descriptor.Descriptor(
name='Int64',
full_name='schematics_proto3.tests.Int64',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Int64.value', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1086,
serialized_end=1108,
)
_UINT64 = _descriptor.Descriptor(
name='UInt64',
full_name='schematics_proto3.tests.UInt64',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.UInt64.value', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1110,
serialized_end=1133,
)
_INT32 = _descriptor.Descriptor(
name='Int32',
full_name='schematics_proto3.tests.Int32',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Int32.value', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1135,
serialized_end=1157,
)
_UINT32 = _descriptor.Descriptor(
name='UInt32',
full_name='schematics_proto3.tests.UInt32',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.UInt32.value', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1159,
serialized_end=1182,
)
_BOOL = _descriptor.Descriptor(
name='Bool',
full_name='schematics_proto3.tests.Bool',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Bool.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1184,
serialized_end=1205,
)
_STRING = _descriptor.Descriptor(
name='String',
full_name='schematics_proto3.tests.String',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.String.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1207,
serialized_end=1230,
)
_BYTES = _descriptor.Descriptor(
name='Bytes',
full_name='schematics_proto3.tests.Bytes',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Bytes.value', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1232,
serialized_end=1254,
)
_REPEATEDPRIMITIVE = _descriptor.Descriptor(
name='RepeatedPrimitive',
full_name='schematics_proto3.tests.RepeatedPrimitive',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.RepeatedPrimitive.value', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1256,
serialized_end=1290,
)
_REPEATEDNESTED_INNER = _descriptor.Descriptor(
name='Inner',
full_name='schematics_proto3.tests.RepeatedNested.Inner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.RepeatedNested.Inner.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=208,
serialized_end=230,
)
_REPEATEDNESTED = _descriptor.Descriptor(
name='RepeatedNested',
full_name='schematics_proto3.tests.RepeatedNested',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='inner', full_name='schematics_proto3.tests.RepeatedNested.inner', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_REPEATEDNESTED_INNER, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1292,
serialized_end=1394,
)
_REPEATEDWRAPPED = _descriptor.Descriptor(
name='RepeatedWrapped',
full_name='schematics_proto3.tests.RepeatedWrapped',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.RepeatedWrapped.value', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1396,
serialized_end=1457,
)
_ONEOFPRIMITIVE = _descriptor.Descriptor(
name='OneOfPrimitive',
full_name='schematics_proto3.tests.OneOfPrimitive',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value1', full_name='schematics_proto3.tests.OneOfPrimitive.value1', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value2', full_name='schematics_proto3.tests.OneOfPrimitive.value2', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='inner', full_name='schematics_proto3.tests.OneOfPrimitive.inner',
index=0, containing_type=None, fields=[]),
],
serialized_start=1459,
serialized_end=1520,
)
_ONEOFNESTED_INNER = _descriptor.Descriptor(
name='Inner',
full_name='schematics_proto3.tests.OneOfNested.Inner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.OneOfNested.Inner.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=208,
serialized_end=230,
)
_ONEOFNESTED = _descriptor.Descriptor(
name='OneOfNested',
full_name='schematics_proto3.tests.OneOfNested',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value1', full_name='schematics_proto3.tests.OneOfNested.value1', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value2', full_name='schematics_proto3.tests.OneOfNested.value2', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ONEOFNESTED_INNER, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='inner', full_name='schematics_proto3.tests.OneOfNested.inner',
index=0, containing_type=None, fields=[]),
],
serialized_start=1523,
serialized_end=1679,
)
_SIMPLEENUM = _descriptor.Descriptor(
name='SimpleEnum',
full_name='schematics_proto3.tests.SimpleEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.SimpleEnum.value', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1681,
serialized_end=1739,
)
_REPEATEDENUM = _descriptor.Descriptor(
name='RepeatedEnum',
full_name='schematics_proto3.tests.RepeatedEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.RepeatedEnum.value', index=0,
number=1, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1741,
serialized_end=1801,
)
_ONEOFENUM = _descriptor.Descriptor(
name='OneOfEnum',
full_name='schematics_proto3.tests.OneOfEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value1', full_name='schematics_proto3.tests.OneOfEnum.value1', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value2', full_name='schematics_proto3.tests.OneOfEnum.value2', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='inner', full_name='schematics_proto3.tests.OneOfEnum.inner',
index=0, containing_type=None, fields=[]),
],
serialized_start=1803,
serialized_end=1920,
)
_NESTED_INNER.containing_type = _NESTED
_NESTED.fields_by_name['inner'].message_type = _NESTED_INNER
_WRAPPEDDOUBLE.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_WRAPPEDFLOAT.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._FLOATVALUE
_WRAPPEDINT64.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_WRAPPEDUINT64.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT64VALUE
_WRAPPEDINT32.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_WRAPPEDUINT32.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT32VALUE
_WRAPPEDBOOL.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_WRAPPEDSTRING.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_WRAPPEDBYTES.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._BYTESVALUE
_TIMESTAMP.fields_by_name['value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_REPEATEDTIMESTAMP.fields_by_name['value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_ONEOFTIMESTAMP.fields_by_name['value1'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ONEOFTIMESTAMP.fields_by_name['value2'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_ONEOFTIMESTAMP.oneofs_by_name['inner'].fields.append(
_ONEOFTIMESTAMP.fields_by_name['value1'])
_ONEOFTIMESTAMP.fields_by_name['value1'].containing_oneof = _ONEOFTIMESTAMP.oneofs_by_name['inner']
_ONEOFTIMESTAMP.oneofs_by_name['inner'].fields.append(
_ONEOFTIMESTAMP.fields_by_name['value2'])
_ONEOFTIMESTAMP.fields_by_name['value2'].containing_oneof = _ONEOFTIMESTAMP.oneofs_by_name['inner']
_REPEATEDNESTED_INNER.containing_type = _REPEATEDNESTED
_REPEATEDNESTED.fields_by_name['inner'].message_type = _REPEATEDNESTED_INNER
_REPEATEDWRAPPED.fields_by_name['value'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_ONEOFPRIMITIVE.oneofs_by_name['inner'].fields.append(
_ONEOFPRIMITIVE.fields_by_name['value1'])
_ONEOFPRIMITIVE.fields_by_name['value1'].containing_oneof = _ONEOFPRIMITIVE.oneofs_by_name['inner']
_ONEOFPRIMITIVE.oneofs_by_name['inner'].fields.append(
_ONEOFPRIMITIVE.fields_by_name['value2'])
_ONEOFPRIMITIVE.fields_by_name['value2'].containing_oneof = _ONEOFPRIMITIVE.oneofs_by_name['inner']
_ONEOFNESTED_INNER.containing_type = _ONEOFNESTED
_ONEOFNESTED.fields_by_name['value1'].message_type = _ONEOFNESTED_INNER
_ONEOFNESTED.fields_by_name['value2'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ONEOFNESTED.oneofs_by_name['inner'].fields.append(
_ONEOFNESTED.fields_by_name['value1'])
_ONEOFNESTED.fields_by_name['value1'].containing_oneof = _ONEOFNESTED.oneofs_by_name['inner']
_ONEOFNESTED.oneofs_by_name['inner'].fields.append(
_ONEOFNESTED.fields_by_name['value2'])
_ONEOFNESTED.fields_by_name['value2'].containing_oneof = _ONEOFNESTED.oneofs_by_name['inner']
_SIMPLEENUM.fields_by_name['value'].enum_type = _ENUM
_REPEATEDENUM.fields_by_name['value'].enum_type = _ENUM
_ONEOFENUM.fields_by_name['value1'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ONEOFENUM.fields_by_name['value2'].enum_type = _ENUM
_ONEOFENUM.oneofs_by_name['inner'].fields.append(
_ONEOFENUM.fields_by_name['value1'])
_ONEOFENUM.fields_by_name['value1'].containing_oneof = _ONEOFENUM.oneofs_by_name['inner']
_ONEOFENUM.oneofs_by_name['inner'].fields.append(
_ONEOFENUM.fields_by_name['value2'])
_ONEOFENUM.fields_by_name['value2'].containing_oneof = _ONEOFENUM.oneofs_by_name['inner']
DESCRIPTOR.message_types_by_name['Nested'] = _NESTED
DESCRIPTOR.message_types_by_name['WrappedDouble'] = _WRAPPEDDOUBLE
DESCRIPTOR.message_types_by_name['WrappedFloat'] = _WRAPPEDFLOAT
DESCRIPTOR.message_types_by_name['WrappedInt64'] = _WRAPPEDINT64
DESCRIPTOR.message_types_by_name['WrappedUInt64'] = _WRAPPEDUINT64
DESCRIPTOR.message_types_by_name['WrappedInt32'] = _WRAPPEDINT32
DESCRIPTOR.message_types_by_name['WrappedUInt32'] = _WRAPPEDUINT32
DESCRIPTOR.message_types_by_name['WrappedBool'] = _WRAPPEDBOOL
DESCRIPTOR.message_types_by_name['WrappedString'] = _WRAPPEDSTRING
DESCRIPTOR.message_types_by_name['WrappedBytes'] = _WRAPPEDBYTES
DESCRIPTOR.message_types_by_name['Timestamp'] = _TIMESTAMP
DESCRIPTOR.message_types_by_name['RepeatedTimestamp'] = _REPEATEDTIMESTAMP
DESCRIPTOR.message_types_by_name['OneOfTimestamp'] = _ONEOFTIMESTAMP
DESCRIPTOR.message_types_by_name['Double'] = _DOUBLE
DESCRIPTOR.message_types_by_name['Float'] = _FLOAT
DESCRIPTOR.message_types_by_name['Int64'] = _INT64
DESCRIPTOR.message_types_by_name['UInt64'] = _UINT64
DESCRIPTOR.message_types_by_name['Int32'] = _INT32
DESCRIPTOR.message_types_by_name['UInt32'] = _UINT32
DESCRIPTOR.message_types_by_name['Bool'] = _BOOL
DESCRIPTOR.message_types_by_name['String'] = _STRING
DESCRIPTOR.message_types_by_name['Bytes'] = _BYTES
DESCRIPTOR.message_types_by_name['RepeatedPrimitive'] = _REPEATEDPRIMITIVE
DESCRIPTOR.message_types_by_name['RepeatedNested'] = _REPEATEDNESTED
DESCRIPTOR.message_types_by_name['RepeatedWrapped'] = _REPEATEDWRAPPED
DESCRIPTOR.message_types_by_name['OneOfPrimitive'] = _ONEOFPRIMITIVE
DESCRIPTOR.message_types_by_name['OneOfNested'] = _ONEOFNESTED
DESCRIPTOR.message_types_by_name['SimpleEnum'] = _SIMPLEENUM
DESCRIPTOR.message_types_by_name['RepeatedEnum'] = _REPEATEDENUM
DESCRIPTOR.message_types_by_name['OneOfEnum'] = _ONEOFENUM
DESCRIPTOR.enum_types_by_name['Enum'] = _ENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Nested = _reflection.GeneratedProtocolMessageType('Nested', (_message.Message,), dict(
Inner = _reflection.GeneratedProtocolMessageType('Inner', (_message.Message,), dict(
DESCRIPTOR = _NESTED_INNER,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Nested.Inner)
))
,
DESCRIPTOR = _NESTED,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Nested)
))
_sym_db.RegisterMessage(Nested)
_sym_db.RegisterMessage(Nested.Inner)
WrappedDouble = _reflection.GeneratedProtocolMessageType('WrappedDouble', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDDOUBLE,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedDouble)
))
_sym_db.RegisterMessage(WrappedDouble)
WrappedFloat = _reflection.GeneratedProtocolMessageType('WrappedFloat', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDFLOAT,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedFloat)
))
_sym_db.RegisterMessage(WrappedFloat)
WrappedInt64 = _reflection.GeneratedProtocolMessageType('WrappedInt64', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDINT64,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedInt64)
))
_sym_db.RegisterMessage(WrappedInt64)
WrappedUInt64 = _reflection.GeneratedProtocolMessageType('WrappedUInt64', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDUINT64,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedUInt64)
))
_sym_db.RegisterMessage(WrappedUInt64)
WrappedInt32 = _reflection.GeneratedProtocolMessageType('WrappedInt32', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDINT32,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedInt32)
))
_sym_db.RegisterMessage(WrappedInt32)
WrappedUInt32 = _reflection.GeneratedProtocolMessageType('WrappedUInt32', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDUINT32,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedUInt32)
))
_sym_db.RegisterMessage(WrappedUInt32)
WrappedBool = _reflection.GeneratedProtocolMessageType('WrappedBool', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDBOOL,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedBool)
))
_sym_db.RegisterMessage(WrappedBool)
WrappedString = _reflection.GeneratedProtocolMessageType('WrappedString', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDSTRING,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedString)
))
_sym_db.RegisterMessage(WrappedString)
WrappedBytes = _reflection.GeneratedProtocolMessageType('WrappedBytes', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDBYTES,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedBytes)
))
_sym_db.RegisterMessage(WrappedBytes)
Timestamp = _reflection.GeneratedProtocolMessageType('Timestamp', (_message.Message,), dict(
DESCRIPTOR = _TIMESTAMP,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Timestamp)
))
_sym_db.RegisterMessage(Timestamp)
RepeatedTimestamp = _reflection.GeneratedProtocolMessageType('RepeatedTimestamp', (_message.Message,), dict(
DESCRIPTOR = _REPEATEDTIMESTAMP,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedTimestamp)
))
_sym_db.RegisterMessage(RepeatedTimestamp)
OneOfTimestamp = _reflection.GeneratedProtocolMessageType('OneOfTimestamp', (_message.Message,), dict(
DESCRIPTOR = _ONEOFTIMESTAMP,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.OneOfTimestamp)
))
_sym_db.RegisterMessage(OneOfTimestamp)
Double = _reflection.GeneratedProtocolMessageType('Double', (_message.Message,), dict(
DESCRIPTOR = _DOUBLE,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Double)
))
_sym_db.RegisterMessage(Double)
Float = _reflection.GeneratedProtocolMessageType('Float', (_message.Message,), dict(
DESCRIPTOR = _FLOAT,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Float)
))
_sym_db.RegisterMessage(Float)
Int64 = _reflection.GeneratedProtocolMessageType('Int64', (_message.Message,), dict(
DESCRIPTOR = _INT64,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Int64)
))
_sym_db.RegisterMessage(Int64)
UInt64 = _reflection.GeneratedProtocolMessageType('UInt64', (_message.Message,), dict(
DESCRIPTOR = _UINT64,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.UInt64)
))
_sym_db.RegisterMessage(UInt64)
Int32 = _reflection.GeneratedProtocolMessageType('Int32', (_message.Message,), dict(
DESCRIPTOR = _INT32,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Int32)
))
_sym_db.RegisterMessage(Int32)
UInt32 = _reflection.GeneratedProtocolMessageType('UInt32', (_message.Message,), dict(
DESCRIPTOR = _UINT32,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.UInt32)
))
_sym_db.RegisterMessage(UInt32)
Bool = _reflection.GeneratedProtocolMessageType('Bool', (_message.Message,), dict(
DESCRIPTOR = _BOOL,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Bool)
))
_sym_db.RegisterMessage(Bool)
String = _reflection.GeneratedProtocolMessageType('String', (_message.Message,), dict(
DESCRIPTOR = _STRING,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.String)
))
_sym_db.RegisterMessage(String)
Bytes = _reflection.GeneratedProtocolMessageType('Bytes', (_message.Message,), dict(
DESCRIPTOR = _BYTES,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Bytes)
))
_sym_db.RegisterMessage(Bytes)
RepeatedPrimitive = _reflection.GeneratedProtocolMessageType('RepeatedPrimitive', (_message.Message,), dict(
DESCRIPTOR = _REPEATEDPRIMITIVE,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedPrimitive)
))
_sym_db.RegisterMessage(RepeatedPrimitive)
RepeatedNested = _reflection.GeneratedProtocolMessageType('RepeatedNested', (_message.Message,), dict(
Inner = _reflection.GeneratedProtocolMessageType('Inner', (_message.Message,), dict(
DESCRIPTOR = _REPEATEDNESTED_INNER,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedNested.Inner)
))
,
DESCRIPTOR = _REPEATEDNESTED,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedNested)
))
_sym_db.RegisterMessage(RepeatedNested)
_sym_db.RegisterMessage(RepeatedNested.Inner)
RepeatedWrapped = _reflection.GeneratedProtocolMessageType('RepeatedWrapped', (_message.Message,), dict(
DESCRIPTOR = _REPEATEDWRAPPED,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedWrapped)
))
_sym_db.RegisterMessage(RepeatedWrapped)
OneOfPrimitive = _reflection.GeneratedProtocolMessageType('OneOfPrimitive', (_message.Message,), dict(
DESCRIPTOR = _ONEOFPRIMITIVE,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.OneOfPrimitive)
))
_sym_db.RegisterMessage(OneOfPrimitive)
OneOfNested = _reflection.GeneratedProtocolMessageType('OneOfNested', (_message.Message,), dict(
Inner = _reflection.GeneratedProtocolMessageType('Inner', (_message.Message,), dict(
DESCRIPTOR = _ONEOFNESTED_INNER,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.OneOfNested.Inner)
))
,
DESCRIPTOR = _ONEOFNESTED,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.OneOfNested)
))
_sym_db.RegisterMessage(OneOfNested)
_sym_db.RegisterMessage(OneOfNested.Inner)
SimpleEnum = _reflection.GeneratedProtocolMessageType('SimpleEnum', (_message.Message,), dict(
DESCRIPTOR = _SIMPLEENUM,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.SimpleEnum)
))
_sym_db.RegisterMessage(SimpleEnum)
RepeatedEnum = _reflection.GeneratedProtocolMessageType('RepeatedEnum', (_message.Message,), dict(
DESCRIPTOR = _REPEATEDENUM,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedEnum)
))
_sym_db.RegisterMessage(RepeatedEnum)
OneOfEnum = _reflection.GeneratedProtocolMessageType('OneOfEnum', (_message.Message,), dict(
DESCRIPTOR = _ONEOFENUM,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.OneOfEnum)
))
_sym_db.RegisterMessage(OneOfEnum)
# @@protoc_insertion_point(module_scope)
| 33.750518
| 3,198
| 0.757377
| 5,740
| 48,837
| 6.097735
| 0.053484
| 0.035428
| 0.091197
| 0.052113
| 0.743779
| 0.698237
| 0.63541
| 0.61144
| 0.59904
| 0.588669
| 0
| 0.038589
| 0.118107
| 48,837
| 1,446
| 3,199
| 33.773859
| 0.774083
| 0.053853
| 0
| 0.654615
| 1
| 0.000769
| 0.19507
| 0.15606
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.006154
| 0
| 0.006154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
4b5cbe04dec6e7c55b09522e71410c55307b8fa0
| 302
|
py
|
Python
|
shop/views/shop_views.py
|
cuescience/cuescience-shop
|
bf5ea159f9277d1d6ab7acfcad3f2517723a225c
|
[
"MIT"
] | null | null | null |
shop/views/shop_views.py
|
cuescience/cuescience-shop
|
bf5ea159f9277d1d6ab7acfcad3f2517723a225c
|
[
"MIT"
] | null | null | null |
shop/views/shop_views.py
|
cuescience/cuescience-shop
|
bf5ea159f9277d1d6ab7acfcad3f2517723a225c
|
[
"MIT"
] | null | null | null |
from shop.models import Product
from django.shortcuts import render_to_response
from django.template import RequestContext
def index_view(request):
products = Product.objects.all()
return render_to_response("cuescience_shop/index.html", RequestContext(request, {"products": products}))
| 20.133333
| 108
| 0.788079
| 37
| 302
| 6.27027
| 0.594595
| 0.086207
| 0.137931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125828
| 302
| 14
| 109
| 21.571429
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0.114865
| 0.087838
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 2
|
4b5d16684195ca44a761cc1ab6755c005952e4d5
| 163
|
py
|
Python
|
qrogue/test/statevector_tests.py
|
7Magic7Mike7/Qrogue
|
70bd5671a77981c1d4b633246321ba44f13c21ff
|
[
"MIT"
] | 4
|
2021-12-14T19:13:43.000Z
|
2022-02-16T13:25:38.000Z
|
qrogue/test/statevector_tests.py
|
7Magic7Mike7/Qrogue
|
70bd5671a77981c1d4b633246321ba44f13c21ff
|
[
"MIT"
] | null | null | null |
qrogue/test/statevector_tests.py
|
7Magic7Mike7/Qrogue
|
70bd5671a77981c1d4b633246321ba44f13c21ff
|
[
"MIT"
] | 1
|
2022-01-04T18:35:51.000Z
|
2022-01-04T18:35:51.000Z
|
import numpy as np
from qrogue.game.logic.actors import StateVector
stv = StateVector([1 / np.sqrt(2), 0 + 0j, 0 + 0j, 1 / np.sqrt(2)])
#stv.extend(1)
print(stv)
| 23.285714
| 67
| 0.680982
| 30
| 163
| 3.7
| 0.6
| 0.054054
| 0.126126
| 0.144144
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 0.153374
| 163
| 7
| 68
| 23.285714
| 0.73913
| 0.079755
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 2
|
4b70e648e25de3717c9f7effa2fbe1723459da27
| 344
|
py
|
Python
|
tests/system/examples/dask/assets/dask_function.py
|
Hedingber/mlrun
|
e2269718fcc7caa7e1aa379ac28495830b45f9da
|
[
"Apache-2.0"
] | 1
|
2021-02-17T08:12:33.000Z
|
2021-02-17T08:12:33.000Z
|
tests/system/examples/dask/assets/dask_function.py
|
Hedingber/mlrun
|
e2269718fcc7caa7e1aa379ac28495830b45f9da
|
[
"Apache-2.0"
] | 1
|
2020-12-31T14:36:29.000Z
|
2020-12-31T14:36:29.000Z
|
tests/system/examples/dask/assets/dask_function.py
|
Hedingber/mlrun
|
e2269718fcc7caa7e1aa379ac28495830b45f9da
|
[
"Apache-2.0"
] | 1
|
2021-08-30T21:43:38.000Z
|
2021-08-30T21:43:38.000Z
|
# function that will be distributed
def inc(x):
return x + 2
# wrapper function, uses the dask client object
def main(context, x=1, y=2):
context.logger.info(f"params: x={x},y={y}")
print(f"params: x={x},y={y}")
x = context.dask_client.submit(inc, x)
print(x)
print(x.result())
context.log_result("y", x.result())
| 24.571429
| 47
| 0.627907
| 59
| 344
| 3.627119
| 0.474576
| 0.037383
| 0.074766
| 0.084112
| 0.102804
| 0.102804
| 0
| 0
| 0
| 0
| 0
| 0.010791
| 0.19186
| 344
| 13
| 48
| 26.461538
| 0.758993
| 0.229651
| 0
| 0
| 0
| 0
| 0.148855
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0
| 0.111111
| 0.333333
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 2
|
4b73785d9dd8a4aaaf6a1aac49dbeb16165c0050
| 1,423
|
py
|
Python
|
demo/demo/urls.py
|
AlanCoding/Example-Django-App
|
1cca52b720d1b117ccce780618d9af94f584ac2c
|
[
"MIT"
] | null | null | null |
demo/demo/urls.py
|
AlanCoding/Example-Django-App
|
1cca52b720d1b117ccce780618d9af94f584ac2c
|
[
"MIT"
] | null | null | null |
demo/demo/urls.py
|
AlanCoding/Example-Django-App
|
1cca52b720d1b117ccce780618d9af94f584ac2c
|
[
"MIT"
] | null | null | null |
"""demo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.conf import settings
from django.contrib import admin
from django.views.generic import TemplateView
import debug_toolbar
from rockband import rocking_urls
# from movies import urls as movie_urls
from async import async_urls
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name="index.html")),
url(r'^admin/', admin.site.urls),
# Rock band urls
url(r'^bands/', include(rocking_urls)),
# asynchronous demo app
url(r'^async/', include(async_urls)),
# url(r'$movies/', include(movie_urls))
# Django auth views
url('^', include('django.contrib.auth.urls')),
]
# For the debug toolbar
if settings.DEBUG:
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
| 33.093023
| 79
| 0.702741
| 209
| 1,423
| 4.703349
| 0.339713
| 0.036623
| 0.076297
| 0.024415
| 0.188199
| 0.188199
| 0.145473
| 0.076297
| 0
| 0
| 0
| 0.007621
| 0.170063
| 1,423
| 42
| 80
| 33.880952
| 0.824725
| 0.106817
| 0
| 0
| 0
| 0
| 0.110048
| 0.038278
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.388889
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 2
|
4b786431aa3dbf51672c3a6c4d1ccbdb01c1f809
| 7,865
|
py
|
Python
|
todo/views.py
|
Azarn/mytodo
|
599b5017b9a952100f05a6180dba5bca0823ad70
|
[
"Apache-2.0"
] | null | null | null |
todo/views.py
|
Azarn/mytodo
|
599b5017b9a952100f05a6180dba5bca0823ad70
|
[
"Apache-2.0"
] | null | null | null |
todo/views.py
|
Azarn/mytodo
|
599b5017b9a952100f05a6180dba5bca0823ad70
|
[
"Apache-2.0"
] | null | null | null |
import logging
from rest_framework import mixins, generics, permissions, exceptions
from django.conf import settings
from django.utils import timezone
from .serializers import CategorySerializer, TagSerializer, TodoSerializer
from .models import Category, Tag, Todo
logger = logging.getLogger(__name__)
class MyGenericApiView(generics.GenericAPIView):
# Disabling "options" method
metadata_class = None
def initial(self, request, *args, **kwargs):
super().initial(request, *args, **kwargs)
timezone.activate(request.user.profile.timezone)
@staticmethod
def _raise_invalid_param(param_name):
raise exceptions.ParseError('parameter `{0}` is invalid'.format(param_name))
def parse_get_int(self, param_name, default=None):
param = self.request.query_params.get(param_name, default)
if param != default:
try:
param = int(param)
except ValueError:
self._raise_invalid_param(param_name)
return param
def parse_get_bool(self, param_name, default=None):
param = self.parse_get_int(param_name, default)
if param != default:
if param not in (0, 1):
self._raise_invalid_param(param_name)
param = bool(param)
return param
# Hiding "options" from available methods
@property
def allowed_methods(self):
methods = super().allowed_methods
methods.remove('OPTIONS')
return methods
class CategoryList(mixins.ListModelMixin,
mixins.CreateModelMixin,
MyGenericApiView):
serializer_class = CategorySerializer
permission_classes = (permissions.IsAuthenticated,)
def get_queryset(self):
return Category.objects.filter(user=self.request.user)
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def get(self, request, *args, **kwargs):
return self.list(request, args, kwargs)
def post(self, request, *args, **kwargs):
return self.create(request, *args, **kwargs)
class CategoryDetail(mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
MyGenericApiView):
serializer_class = CategorySerializer
permission_classes = (permissions.IsAuthenticated,)
def get_queryset(self):
return Category.objects.filter(user=self.request.user)
def get(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
return self.update(request, *args, partial=True, **kwargs)
def delete(self, request, *args, **kwargs):
return self.destroy(request, *args, **kwargs)
class TagList(mixins.ListModelMixin,
mixins.CreateModelMixin,
MyGenericApiView):
serializer_class = TagSerializer
permission_classes = (permissions.IsAuthenticated,)
def get_queryset(self):
return Tag.objects.filter(user=self.request.user)
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def get(self, request, *args, **kwargs):
return self.list(request, args, kwargs)
def post(self, request, *args, **kwargs):
return self.create(request, *args, **kwargs)
class TagDetail(mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
MyGenericApiView):
serializer_class = TagSerializer
permission_classes = (permissions.IsAuthenticated,)
def get_queryset(self):
return Tag.objects.filter(user=self.request.user)
def get(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
return self.update(request, *args, partial=True, **kwargs)
def delete(self, request, *args, **kwargs):
return self.destroy(request, *args, **kwargs)
class TodoList(mixins.ListModelMixin,
mixins.CreateModelMixin,
MyGenericApiView):
serializer_class = TodoSerializer
permission_classes = (permissions.IsAuthenticated,)
def get_queryset(self):
"""
Gets query according to GET params
Available GET params:
only_done: if specified, todos will be filtered by `todo.is_done` = only_done
category: if specified todos will be filtered by this category
tags: if specified todos will be filtered by this tags list
only_one_day: if specified changes behaviour of by_date(see below) to show todos only for one day
by_date: if specified todos will be filtered by this date,
if it is equal to `None`, filters todos without deadline
:return: queryset
"""
q = Todo.objects.filter(user=self.request.user)
only_done = self.parse_get_bool('only_done')
only_one_day = self.parse_get_bool('only_one_day', False)
category = self.request.query_params.get('category')
tags = self.request.query_params.getlist('tags')
by_date = self.request.query_params.get('by_date')
if only_done is not None:
if only_done:
q = q.filter(is_done=True)
else:
q = q.filter(is_done=False)
if category is not None:
try:
category = int(category)
except ValueError:
raise exceptions.ParseError('parameter `category` is invalid')
else:
q = q.filter(category__pk=category)
if tags:
try:
tags = list(map(int, tags))
except ValueError:
raise exceptions.ParseError('parameter `tags` is invalid')
else:
for t in tags:
q = q.filter(tags__pk=t)
if by_date is not None:
if by_date in ('today', 'tomorrow', 'week', 'none'):
date = timezone.localtime(timezone.now())
else:
try:
date = timezone.datetime.strptime(by_date, settings.DATE_FORMAT)
except TypeError:
raise exceptions.ParseError('parameter `by_date` is invalid')
date = timezone.make_aware(timezone.datetime.combine(date, timezone.datetime.max.time()))
if by_date == 'tomorrow':
date += timezone.timedelta(days=1)
elif by_date == 'week':
date += timezone.timedelta(days=6)
logger.warn(str(date))
if by_date == 'none':
q = q.filter(deadline__isnull=True)
elif only_one_day:
q = q.filter(deadline__date=date)
else:
q = q.filter(deadline__lte=date)
return q.prefetch_related('tags')
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def get(self, request, *args, **kwargs):
return self.list(request, args, kwargs)
def post(self, request, *args, **kwargs):
return self.create(request, *args, **kwargs)
class TodoDetail(mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
MyGenericApiView):
serializer_class = TodoSerializer
permission_classes = (permissions.IsAuthenticated,)
def get_queryset(self):
return Todo.objects.filter(user=self.request.user)
def get(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
return self.update(request, *args, partial=True, **kwargs)
def delete(self, request, *args, **kwargs):
return self.destroy(request, *args, **kwargs)
| 34.047619
| 105
| 0.628608
| 861
| 7,865
| 5.624855
| 0.186992
| 0.072682
| 0.101796
| 0.069378
| 0.615321
| 0.581251
| 0.535825
| 0.481313
| 0.459013
| 0.415032
| 0
| 0.000874
| 0.272346
| 7,865
| 230
| 106
| 34.195652
| 0.845361
| 0.070947
| 0
| 0.561728
| 0
| 0
| 0.027986
| 0
| 0
| 0
| 0
| 0.026087
| 0
| 1
| 0.179012
| false
| 0
| 0.037037
| 0.123457
| 0.487654
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 2
|
4b7b56b22f9f50b0ab5dcb31b7bb4cdc39078ed0
| 2,014
|
py
|
Python
|
2_writeups/4_other/pdf/pipeline.py
|
araujorayza/robot_hacking_manual
|
d11feecc8931b1449b0ab30a51a55f71f51dd965
|
[
"Apache-2.0"
] | 141
|
2021-11-14T15:27:04.000Z
|
2022-03-30T00:44:48.000Z
|
2_writeups/4_other/pdf/pipeline.py
|
araujorayza/robot_hacking_manual
|
d11feecc8931b1449b0ab30a51a55f71f51dd965
|
[
"Apache-2.0"
] | 1
|
2021-11-17T06:38:44.000Z
|
2021-11-17T06:38:45.000Z
|
2_writeups/4_other/pdf/pipeline.py
|
araujorayza/robot_hacking_manual
|
d11feecc8931b1449b0ab30a51a55f71f51dd965
|
[
"Apache-2.0"
] | 18
|
2021-11-15T09:55:48.000Z
|
2022-03-08T10:25:58.000Z
|
"""
Script to generate a security pipeline for PDF files.
It does the following:
- Adds specified meta-data
- Encrypts file
Run:
python3 pipeline.py
"""
from PyPDF2 import PdfFileWriter, PdfFileReader
from PyPDF2.generic import NameObject, createStringObject
def encrypt(input_pdf, output_pdf, password):
pdf_writer = PdfFileWriter()
pdf_reader = PdfFileReader(input_pdf)
for page in range(pdf_reader.getNumPages()):
pdf_writer.addPage(pdf_reader.getPage(page))
pdf_writer.encrypt(user_pwd=password, owner_pwd=None,
use_128bit=True)
with open(output_pdf, 'wb') as fh:
pdf_writer.write(fh)
def meta(input_pdf, output_pdf, value):
pdf_writer = PdfFileWriter()
pdf_reader = PdfFileReader(input_pdf)
for page in range(pdf_reader.getNumPages()):
pdf_writer.addPage(pdf_reader.getPage(page))
# pdf_writer.encrypt(user_pwd=password, owner_pwd=None,
# use_128bit=True)
infoDict = pdf_writer._info.getObject()
infoDict.update({NameObject('/Version'): createStringObject(u'234ds2')})
info = pdf_reader.documentInfo
for key in info:
infoDict.update({NameObject(key): createStringObject(info[key])})
# add the grade
# infoDict.update({NameObject('/Grade'): createStringObject(u'A+')})
# infoDict.update({NameObject('/Grade2'): createStringObject(u'A+')})
infoDict.update({NameObject('/Key'): createStringObject(value)})
with open(output_pdf, 'wb') as fh:
pdf_writer.write(fh)
if __name__ == '__main__':
# path for the file to process
filepath = "/Users/victor/Desktop/Apex.AI_Threat_Model_AliasRobotics.pdf"
# meta-data-value
meta_value = u'HitachiVentures'
meta(input_pdf=filepath,
output_pdf=filepath+"underNDA.pdf",
value=meta_value)
encrypt(input_pdf=filepath+"underNDA.pdf",
output_pdf=filepath+"underNDA_encrypted.pdf",
password='4l14srobotics')
| 28.366197
| 77
| 0.682224
| 240
| 2,014
| 5.520833
| 0.3625
| 0.061132
| 0.090566
| 0.02566
| 0.443774
| 0.393962
| 0.327547
| 0.327547
| 0.327547
| 0.327547
| 0
| 0.010599
| 0.203575
| 2,014
| 70
| 78
| 28.771429
| 0.815461
| 0.215988
| 0
| 0.363636
| 1
| 0
| 0.104792
| 0.052396
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0.090909
| 0.060606
| 0
| 0.121212
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 2
|
4b81b5fe4aceb22e7a99ad217502d745e5a0019f
| 1,069
|
py
|
Python
|
spotty/commands/abstract_command.py
|
Inculus/spotty
|
56863012668a6c13ad13c2a04f900047e229fbe6
|
[
"MIT"
] | 246
|
2018-09-03T09:09:48.000Z
|
2020-07-18T21:07:15.000Z
|
spotty/commands/abstract_command.py
|
Inculus/spotty
|
56863012668a6c13ad13c2a04f900047e229fbe6
|
[
"MIT"
] | 42
|
2018-10-09T19:41:56.000Z
|
2020-06-15T22:55:58.000Z
|
spotty/commands/abstract_command.py
|
Inculus/spotty
|
56863012668a6c13ad13c2a04f900047e229fbe6
|
[
"MIT"
] | 27
|
2018-10-09T22:16:40.000Z
|
2020-06-08T22:26:00.000Z
|
from abc import ABC, abstractmethod
from argparse import Namespace, ArgumentParser
from spotty.commands.writers.abstract_output_writrer import AbstractOutputWriter
class AbstractCommand(ABC):
"""Abstract class for a Spotty sub-command."""
@property
@abstractmethod
def name(self) -> str:
"""The sub-command name."""
raise NotImplementedError
@property
def description(self) -> str:
"""The sub-command description. It will be displayed in the help text."""
return ''
def configure(self, parser: ArgumentParser):
"""Adds arguments for the sub-command."""
parser.add_argument('-d', '--debug', action='store_true', help='Show debug messages')
@abstractmethod
def run(self, args: Namespace, output: AbstractOutputWriter):
"""Runs the sub-command.
Args:
args: Arguments provided by argparse.
output: Output writer.
Raises:
ValueError: If command's arguments can't be processed.
"""
raise NotImplementedError
| 30.542857
| 93
| 0.654818
| 115
| 1,069
| 6.052174
| 0.547826
| 0.071839
| 0.074713
| 0.037356
| 0.057471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.246024
| 1,069
| 34
| 94
| 31.441176
| 0.863524
| 0.309635
| 0
| 0.375
| 0
| 0
| 0.057315
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.1875
| 0
| 0.5625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 2
|
4b92d579f1edf869213b966f3c57e11cb659219d
| 1,048
|
py
|
Python
|
Day18/main.py
|
MHKomeili/100DaysofCode
|
a5799011a43f777ddc5ac9e649aa27291313b62b
|
[
"MIT"
] | null | null | null |
Day18/main.py
|
MHKomeili/100DaysofCode
|
a5799011a43f777ddc5ac9e649aa27291313b62b
|
[
"MIT"
] | null | null | null |
Day18/main.py
|
MHKomeili/100DaysofCode
|
a5799011a43f777ddc5ac9e649aa27291313b62b
|
[
"MIT"
] | null | null | null |
# import colorgram
#
# colors = colorgram.extract('image.jpg', 30)
# rgb_colors = []
# for color in colors:
# rgb_colors.append((color.rgb.r, color.rgb.g, color.rgb.b))
#
# print(rgb_colors)
from turtle import Turtle, Screen
import random
color_list = [(238, 251, 245), (250, 228, 15), (213, 12, 8), (199, 11, 36), (10, 98, 61), (5, 39, 32), (232, 228, 5),
(64, 221, 157), (198, 68, 19), (32, 91, 189), (43, 212, 71), (235, 148, 38), (32, 30, 153),
(242, 247, 251), (15, 22, 54), (67, 9, 49), (245, 38, 148), (14, 206, 222), (65, 203, 230), (62, 20, 10),
(229, 164, 7), (226, 19, 111), (14, 154, 22), (246, 58, 14), (98, 75, 8), (248, 11, 9), (223, 140, 205),
(66, 241, 160),
]
tim = Turtle()
scr = Screen()
scr.colormode(255)
tim.penup()
tim.hideturtle()
tim.setposition(-300, -300)
for i in range(10):
tim.setposition(-300, tim.ycor() + 50)
for j in range(10):
tim.setx(tim.xcor() + 50)
tim.dot(20, random.choice(color_list))
scr.exitonclick()
| 31.757576
| 119
| 0.540076
| 167
| 1,048
| 3.359281
| 0.622754
| 0.048128
| 0.060606
| 0.042781
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.279449
| 0.23855
| 1,048
| 33
| 120
| 31.757576
| 0.423559
| 0.169847
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
4ba3e0dab8146008256a0da74d6aec2d33aa11e9
| 127
|
py
|
Python
|
appending_to_files.py
|
jaiminjerry/Python
|
eb7013c7560b09d37849d653516257d939e143aa
|
[
"bzip2-1.0.6"
] | null | null | null |
appending_to_files.py
|
jaiminjerry/Python
|
eb7013c7560b09d37849d653516257d939e143aa
|
[
"bzip2-1.0.6"
] | null | null | null |
appending_to_files.py
|
jaiminjerry/Python
|
eb7013c7560b09d37849d653516257d939e143aa
|
[
"bzip2-1.0.6"
] | 1
|
2021-08-17T03:46:56.000Z
|
2021-08-17T03:46:56.000Z
|
appendMe = '\nNew bit of information'
appendFile = open('example.txt','a')
appendFile.write(appendMe)
appendFile.close()
| 21.166667
| 38
| 0.716535
| 15
| 127
| 6.066667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133858
| 127
| 5
| 39
| 25.4
| 0.827273
| 0
| 0
| 0
| 0
| 0
| 0.295082
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
4ba51c782d7e269031d5abf6080e2a03357844fd
| 849
|
py
|
Python
|
mcir/t1_hist.py
|
omritomer/mcir
|
1554d352172464c6314339195d6ea9a5e00824af
|
[
"MIT"
] | null | null | null |
mcir/t1_hist.py
|
omritomer/mcir
|
1554d352172464c6314339195d6ea9a5e00824af
|
[
"MIT"
] | null | null | null |
mcir/t1_hist.py
|
omritomer/mcir
|
1554d352172464c6314339195d6ea9a5e00824af
|
[
"MIT"
] | null | null | null |
import numpy as np
class T1Hist:
def __init__(
self, max_t1: float = 3000, min_t1: float = 200
):
self.max_t1 = max_t1
self.min_t1 = min_t1
def get_t1_histogram(self, t1_matrix: np.ndarray, norm_m0_matrix: np.ndarray):
t1_histogram = t1_matrix.astype(float).ravel()
t1_weights = norm_m0_matrix.astype(float).ravel()
self.t1_histogram, self.t1_weights = self.remove_outliers(
t1_histogram, t1_weights
)
def remove_outliers(self, t1_histogram, t1_weights):
t1_weights = t1_weights[
(t1_histogram > (self.min_t1))
& (t1_histogram < (self.max_t1))
]
t1_histogram = t1_histogram[
(t1_histogram > (self.min_t1))
& (t1_histogram < (self.max_t1))
]
return t1_histogram, t1_weights
| 30.321429
| 82
| 0.61013
| 110
| 849
| 4.327273
| 0.245455
| 0.277311
| 0.189076
| 0.12605
| 0.22479
| 0.168067
| 0.168067
| 0.168067
| 0.168067
| 0.168067
| 0
| 0.06822
| 0.292108
| 849
| 27
| 83
| 31.444444
| 0.723794
| 0
| 0
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.130435
| false
| 0
| 0.043478
| 0
| 0.26087
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
4ba5d882b2fc5de31e1705b7b18a845f264237e7
| 209
|
py
|
Python
|
main.py
|
Javert899/pm4py-tool-plugin-skeleton
|
cfc4aefd02499b323ae60e33f059a6b90e48a95f
|
[
"MIT"
] | null | null | null |
main.py
|
Javert899/pm4py-tool-plugin-skeleton
|
cfc4aefd02499b323ae60e33f059a6b90e48a95f
|
[
"MIT"
] | null | null | null |
main.py
|
Javert899/pm4py-tool-plugin-skeleton
|
cfc4aefd02499b323ae60e33f059a6b90e48a95f
|
[
"MIT"
] | null | null | null |
import pluginpackageRENAME
import preload
import os
if __name__ == "__main__":
preload.preload()
app = pluginpackageRENAME.app
app.static_folder = os.path.join(os.getcwd(), "html")
app.run()
| 19
| 57
| 0.703349
| 25
| 209
| 5.52
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177033
| 209
| 10
| 58
| 20.9
| 0.802326
| 0
| 0
| 0
| 0
| 0
| 0.057416
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 2
|
4bbef0cf8a34a2518357110d8f321604ae40180d
| 337
|
py
|
Python
|
51_60/day-60/forms/main.py
|
srakhe/100-days-py
|
4d99ab35eb1376d2f8722c42e0bf98acc18fba20
|
[
"MIT"
] | null | null | null |
51_60/day-60/forms/main.py
|
srakhe/100-days-py
|
4d99ab35eb1376d2f8722c42e0bf98acc18fba20
|
[
"MIT"
] | null | null | null |
51_60/day-60/forms/main.py
|
srakhe/100-days-py
|
4d99ab35eb1376d2f8722c42e0bf98acc18fba20
|
[
"MIT"
] | null | null | null |
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def home():
return render_template('index.html')
@app.route('/login', methods=['POST'])
def login():
return f'<h2>Name: {request.form["name"]}</h2><br><h2>Password: {request.form["password"]}'
if __name__ == '__main__':
app.run()
| 18.722222
| 95
| 0.652819
| 45
| 337
| 4.577778
| 0.555556
| 0.135922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0.136499
| 337
| 17
| 96
| 19.823529
| 0.697595
| 0
| 0
| 0
| 0
| 0.1
| 0.32641
| 0.207715
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.1
| 0.1
| 0.2
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 2
|
29955858830022ac8b0ab1ecf8622aef64dde5f8
| 395
|
py
|
Python
|
yao_framework/__init__.py
|
QuantumBFS/yao-python
|
c877b3c4f920e76858021b6af8728f839d88fc1d
|
[
"Apache-2.0"
] | 3
|
2019-12-04T16:40:55.000Z
|
2021-12-16T04:28:59.000Z
|
yao_framework/__init__.py
|
QuantumBFS/yao-python
|
c877b3c4f920e76858021b6af8728f839d88fc1d
|
[
"Apache-2.0"
] | null | null | null |
yao_framework/__init__.py
|
QuantumBFS/yao-python
|
c877b3c4f920e76858021b6af8728f839d88fc1d
|
[
"Apache-2.0"
] | 2
|
2021-05-07T01:17:50.000Z
|
2021-12-16T04:32:31.000Z
|
# workaround static linked python
from julia.api import Julia
__julia__ = Julia(compiled_modules=False)
import os
import sys
import subprocess
from .wrappers import apply
script_dir = os.path.dirname(os.path.realpath(__file__))
def install():
"""
Install Julia packages required for yao-framework.
"""
subprocess.check_call(['julia', os.path.join(script_dir, 'install.jl')])
| 21.944444
| 76
| 0.744304
| 53
| 395
| 5.320755
| 0.622642
| 0.06383
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146835
| 395
| 17
| 77
| 23.235294
| 0.836795
| 0.210127
| 0
| 0
| 0
| 0
| 0.050676
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.555556
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 2
|
29cdd1a0441cda0528b31705900a6564e1af5682
| 179
|
py
|
Python
|
app/blog/urls.py
|
AjayHao/AtThirty
|
96b2ce00be8f3ed07ee5e6e2b1ca13ab25e9521d
|
[
"MIT"
] | null | null | null |
app/blog/urls.py
|
AjayHao/AtThirty
|
96b2ce00be8f3ed07ee5e6e2b1ca13ab25e9521d
|
[
"MIT"
] | null | null | null |
app/blog/urls.py
|
AjayHao/AtThirty
|
96b2ce00be8f3ed07ee5e6e2b1ca13ab25e9521d
|
[
"MIT"
] | null | null | null |
from django.conf.urls import patterns, url
from app.blog import views as blog_views
urlpatterns = [
#django url
url(r'^$', blog_views.index, name='blog_index'),
]
| 16.272727
| 52
| 0.681564
| 26
| 179
| 4.576923
| 0.576923
| 0.151261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.201117
| 179
| 10
| 53
| 17.9
| 0.832168
| 0.055866
| 0
| 0
| 0
| 0
| 0.071429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 2
|
29e53d00d3dfdf9edbf744f3dfa7a95332d492b5
| 170
|
py
|
Python
|
books/init_api.py
|
nabekabebe/BookFInder
|
aaa7eb3028cb2ef5552f865107ddb13a5dc3fde7
|
[
"MIT"
] | null | null | null |
books/init_api.py
|
nabekabebe/BookFInder
|
aaa7eb3028cb2ef5552f865107ddb13a5dc3fde7
|
[
"MIT"
] | null | null | null |
books/init_api.py
|
nabekabebe/BookFInder
|
aaa7eb3028cb2ef5552f865107ddb13a5dc3fde7
|
[
"MIT"
] | null | null | null |
from flask_restplus import Api
API = Api(
title="Book API",
version='1.0',
description="This Api provides endpoint for accessing books and their reviews."
)
| 21.25
| 83
| 0.705882
| 24
| 170
| 4.958333
| 0.833333
| 0.10084
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014706
| 0.2
| 170
| 7
| 84
| 24.285714
| 0.860294
| 0
| 0
| 0
| 0
| 0
| 0.447059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
29f700b90ab2377c8ba15763c3022ce5834a7f4f
| 4,445
|
py
|
Python
|
python/fe3lmaker/s3-driver.py
|
flarebyte/wonderful-bazar
|
810514cd7d73505b11d738f8b84d91842d18d074
|
[
"MIT"
] | null | null | null |
python/fe3lmaker/s3-driver.py
|
flarebyte/wonderful-bazar
|
810514cd7d73505b11d738f8b84d91842d18d074
|
[
"MIT"
] | null | null | null |
python/fe3lmaker/s3-driver.py
|
flarebyte/wonderful-bazar
|
810514cd7d73505b11d738f8b84d91842d18d074
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# This software code is made available "AS IS" without warranties of any
# kind. You may copy, display, modify and redistribute the software
# code either by itself or as incorporated into your code; provided that
# you do not remove any proprietary notices. Your use of this software
# code is at your own risk and you waive any claim against Amazon
# Digital Services, Inc. or its affiliates with respect to your use of
# this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
# affiliates.
import S3
import time
import sys
AWS_ACCESS_KEY_ID = '<INSERT YOUR AWS ACCESS KEY ID HERE>'
AWS_SECRET_ACCESS_KEY = '<INSERT YOUR AWS SECRET ACCESS KEY HERE>'
# remove these next two lines when you've updated your credentials.
print "update s3-driver.py with your AWS credentials"
sys.exit();
# convert the bucket to lowercase for vanity domains
# the bucket name must be lowercase since DNS is case-insensitive
BUCKET_NAME = AWS_ACCESS_KEY_ID.lower() + '-test-bucket'
KEY_NAME = 'test-key'
conn = S3.AWSAuthConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
generator = S3.QueryStringAuthGenerator(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
# Check if the bucket exists. The high availability engineering of
# Amazon S3 is focused on get, put, list, and delete operations.
# Because bucket operations work against a centralized, global
# resource space, it is not appropriate to make bucket create or
# delete calls on the high availability code path of your application.
# It is better to create or delete buckets in a separate initialization
# or setup routine that you run less often.
if (conn.check_bucket_exists(BUCKET_NAME).status == 200):
print '----- bucket already exists! -----'
else:
print '----- creating bucket -----'
print conn.create_located_bucket(BUCKET_NAME, S3.Location.DEFAULT).message
# to create an EU bucket
#print conn.create_located_bucket(BUCKET_NAME, S3.Location.EU).message
print '----- bucket location -----'
print conn.get_bucket_location(BUCKET_NAME).location
print '----- listing bucket -----'
print map(lambda x: x.key, conn.list_bucket(BUCKET_NAME).entries)
print '----- putting object (with content type) -----'
print conn.put(
BUCKET_NAME,
KEY_NAME,
S3.S3Object('this is a test'),
{ 'Content-Type': 'text/plain' }).message
print '----- listing bucket -----'
print map(lambda x: x.key, conn.list_bucket(BUCKET_NAME).entries)
print '----- getting object -----'
print conn.get(BUCKET_NAME, KEY_NAME).object.data
print '----- query string auth example -----'
print "\nTry this url out in your browser (it will only be valid for 60 seconds).\n"
generator.set_expires_in(60);
url = generator.get(BUCKET_NAME, KEY_NAME)
print url
print '\npress enter> ',
sys.stdin.readline()
print "\nNow try just the url without the query string arguments. it should fail.\n"
print generator.make_bare_url(BUCKET_NAME, KEY_NAME)
print '\npress enter> ',
sys.stdin.readline()
print '----- putting object with metadata and public read acl -----'
print conn.put(
BUCKET_NAME,
KEY_NAME + '-public',
S3.S3Object('this is a publicly readable test'),
{ 'x-amz-acl': 'public-read' , 'Content-Type': 'text/plain' }
).message
print '----- anonymous read test ----'
print "\nYou should be able to try this in your browser\n"
public_key = KEY_NAME + '-public'
print generator.make_bare_url(BUCKET_NAME, public_key)
print "\npress enter> ",
sys.stdin.readline()
print "----- getting object's acl -----"
print conn.get_acl(BUCKET_NAME, KEY_NAME).object.data
print "\n----- path style url example -----";
print "Non-location-constrained buckets can also be specified as part of the url path. (This was the original url style supported by S3.)\n";
print "Try this url out in your browser (it will only be valid for 60 seconds).\n"
generator.calling_format = S3.CallingFormat.PATH
url = generator.get(BUCKET_NAME, KEY_NAME)
print url
print "\npress enter> ",
sys.stdin.readline()
print '----- deleting objects -----'
print conn.delete(BUCKET_NAME, KEY_NAME).message
print conn.delete(BUCKET_NAME, KEY_NAME + '-public').message
print '----- listing bucket -----'
print map(lambda x: x.key, conn.list_bucket(BUCKET_NAME).entries)
print '----- listing all my buckets -----'
print map(lambda x: x.name, conn.list_all_my_buckets().entries)
print '----- deleting bucket ------'
print conn.delete_bucket(BUCKET_NAME).message
| 37.352941
| 142
| 0.730259
| 673
| 4,445
| 4.710253
| 0.322437
| 0.063091
| 0.036909
| 0.048265
| 0.399054
| 0.376341
| 0.344164
| 0.215457
| 0.195268
| 0.195268
| 0
| 0.007922
| 0.148032
| 4,445
| 118
| 143
| 37.669492
| 0.829152
| 0.277165
| 0
| 0.30137
| 0
| 0.013699
| 0.396427
| 0.007521
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.041096
| null | null | 0.589041
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 2
|
29f8e7164c007819cd3fd8ace5f9b2b1776fa29b
| 19,622
|
py
|
Python
|
client/buck_project_builder/tests/builder_test.py
|
aspin/pyre-check
|
fe78b41789ba3ef091b0b021d9a1c6267905a7f8
|
[
"MIT"
] | null | null | null |
client/buck_project_builder/tests/builder_test.py
|
aspin/pyre-check
|
fe78b41789ba3ef091b0b021d9a1c6267905a7f8
|
[
"MIT"
] | null | null | null |
client/buck_project_builder/tests/builder_test.py
|
aspin/pyre-check
|
fe78b41789ba3ef091b0b021d9a1c6267905a7f8
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2019-present, Facebook, Inc.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import glob
import unittest
from typing import List, Optional
from unittest.mock import MagicMock, patch
from .. import BuilderException, FastBuckBuilder, Target, parser
from ..build_target import (
BuildTarget,
PythonBinary,
PythonLibrary,
PythonWheel,
ThriftLibrary,
)
from ..filesystem import Sources
from .test_common import base
class BuilderTest(unittest.TestCase):
def assert_target_names_equal(self, list_a: List[str], list_b: List[str]) -> None:
self.assertListEqual(sorted(list_a), sorted(list_b))
def assert_targets_equal(
self, targets: List[BuildTarget], target_names: List[str]
) -> None:
self.assert_target_names_equal(
[target.target for target in targets], target_names
)
def assert_raises_builder_exception(self, function, *args, expected_targets=None):
try:
function(*args)
except BuilderException as error:
self.assert_target_names_equal(error.targets, expected_targets)
else:
self.fail("Expected BuilderException to be thrown.")
def test_parse_target(self):
builder = FastBuckBuilder("/ROOT")
self.assertEqual(builder._parse_target("//a:b"), Target("a", "b"))
self.assert_raises_builder_exception(
builder._parse_target, "//a:", expected_targets=["//a:"]
)
self.assert_raises_builder_exception(
builder._parse_target, "//a/...", expected_targets=["//a/..."]
)
def test_compute_targets_to_build_simple(self):
# Dependency graph:
# a
# / \
# b <- c
# | /
# d e
build_file = MagicMock()
build_file.targets = {
"a": PythonBinary(
"/ROOT", "project", base("a", ["//project:b", "//project:c"])
),
"b": PythonLibrary("/ROOT", "project", base("b", ["//project:d"])),
"c": PythonLibrary(
"/ROOT", "project", base("c", ["//project:b", "//project:d"])
),
"d": PythonLibrary("/ROOT", "project", base("d")),
"e": PythonLibrary("/ROOT", "project", base("e")),
}
with patch.object(parser.Parser, "parse_file", return_value=build_file):
builder = FastBuckBuilder("/ROOT")
targets = builder.compute_targets_to_build(["//project:a"])
self.assert_targets_equal(
targets, ["//project:a", "//project:b", "//project:c", "//project:d"]
)
targets = builder.compute_targets_to_build(["//project:b"])
self.assert_targets_equal(targets, ["//project:b", "//project:d"])
targets = builder.compute_targets_to_build(["//project:c"])
self.assert_targets_equal(
targets, ["//project:b", "//project:c", "//project:d"]
)
targets = builder.compute_targets_to_build(["//project:d"])
self.assert_targets_equal(targets, ["//project:d"])
targets = builder.compute_targets_to_build(["//project:e"])
self.assert_targets_equal(targets, ["//project:e"])
targets = builder.compute_targets_to_build(["//project:a", "//project:e"])
self.assert_targets_equal(
targets,
[
"//project:a",
"//project:b",
"//project:c",
"//project:d",
"//project:e",
],
)
self.assert_raises_builder_exception(
builder.compute_targets_to_build,
["//project:e", "//project:f", "//project:g"],
expected_targets=["//project:f", "//project:g"],
)
builder = FastBuckBuilder("/ROOT", fail_on_unbuilt_target=False)
targets = builder.compute_targets_to_build(
["//project:e", "//project:f", "//project:g"]
)
self.assert_targets_equal(targets, ["//project:e"])
def test_compute_targets_to_build_complex(self):
# Dependency graph:
# a
# / \
# b c
# | |
# d <- e
build_file_1 = MagicMock()
build_file_1.targets = {
"a": PythonBinary(
"/ROOT", "project1", base("a", ["//project1:b", "//project2:c"])
),
"b": PythonLibrary("/ROOT", "project1", base("b", ["//project2:d"])),
}
build_file_2 = MagicMock()
build_file_2.targets = {
"c": PythonLibrary("/ROOT", "project2", base("c", ["//project2:e"])),
"d": PythonLibrary("/ROOT", "project2", base("d")),
"e": PythonLibrary("/ROOT", "project2", base("e", ["//project2:d"])),
}
build_file_mapping = {"project1": build_file_1, "project2": build_file_2}
with patch.object(
parser.Parser, "parse_file", side_effect=build_file_mapping.get
):
builder = FastBuckBuilder("/ROOT")
targets = builder.compute_targets_to_build(["//project1:a"])
self.assert_targets_equal(
targets,
[
"//project1:a",
"//project1:b",
"//project2:c",
"//project2:d",
"//project2:e",
],
)
targets = builder.compute_targets_to_build(["//project1:b"])
self.assert_targets_equal(targets, ["//project1:b", "//project2:d"])
targets = builder.compute_targets_to_build(["//project2:c"])
self.assert_targets_equal(
targets, ["//project2:c", "//project2:e", "//project2:d"]
)
targets = builder.compute_targets_to_build(["//project2:d"])
self.assert_targets_equal(targets, ["//project2:d"])
targets = builder.compute_targets_to_build(["//project2:e"])
self.assert_targets_equal(targets, ["//project2:e", "//project2:d"])
self.assert_raises_builder_exception(
builder.compute_targets_to_build,
["//project1:f"],
expected_targets=["//project1:f"],
)
def test_targets_to_build_file_wildcard(self):
# Dependency graph:
# a -> c -> d <- e b
build_file_1 = MagicMock()
build_file_1.targets = {
"a": PythonBinary("/ROOT", "project1", base("a", ["//project2:c"])),
"b": PythonLibrary("/ROOT", "project1", base("b")),
}
build_file_2 = MagicMock()
build_file_2.targets = {
"c": PythonLibrary("/ROOT", "project2", base("c", ["//project2:d"])),
"d": PythonLibrary("/ROOT", "project2", base("d")),
"e": PythonLibrary("/ROOT", "project2", base("e", ["//project2:d"])),
}
build_file_mapping = {"project1": build_file_1, "project2": build_file_2}
with patch.object(
parser.Parser, "parse_file", side_effect=build_file_mapping.get
):
builder = FastBuckBuilder("/ROOT")
targets = builder.compute_targets_to_build(["//project1:"])
self.assert_targets_equal(
targets,
["//project1:a", "//project1:b", "//project2:c", "//project2:d"],
)
targets = builder.compute_targets_to_build(["//project2:"])
self.assert_targets_equal(
targets, ["//project2:c", "//project2:d", "//project2:e"]
)
targets = builder.compute_targets_to_build(["//project1:", "//project2:"])
self.assert_targets_equal(
targets,
[
"//project1:a",
"//project1:b",
"//project2:c",
"//project2:d",
"//project2:e",
],
)
def test_targets_to_build_directory_wildcard(self):
# Dependency graph:
# a -> c d b <- e
build_file_1 = MagicMock()
build_file_1.targets = {
"a": PythonBinary(
"/ROOT", "project1", base("a", ["//project1/subproject:c"])
),
"b": PythonLibrary("/ROOT", "project1", base("b")),
}
build_file_2 = MagicMock()
build_file_2.targets = {
"c": PythonLibrary("/ROOT", "project1/subproject", base("c")),
"d": PythonLibrary("/ROOT", "project1/subproject", base("d")),
}
build_file_3 = MagicMock()
build_file_3.targets = {
"e": PythonLibrary("/ROOT", "project2", base("e", ["//project1:b"]))
}
build_file_mapping = {
"project1": build_file_1,
"project1/subproject": build_file_2,
"project2": build_file_3,
}
with patch.object(
parser.Parser, "parse_file", side_effect=build_file_mapping.get
):
builder = FastBuckBuilder("/ROOT")
with patch.object(
glob,
"iglob",
return_value=[
"/ROOT/project1/TARGETS",
"/ROOT/project1/subproject/TARGETS",
"/ROOT/project2/TARGETS",
],
):
targets = builder.compute_targets_to_build(["//..."])
self.assert_targets_equal(
targets,
[
"//project1:a",
"//project1:b",
"//project1/subproject:c",
"//project1/subproject:d",
"//project2:e",
],
)
with patch.object(
glob,
"iglob",
return_value=[
"/ROOT/project1/TARGETS",
"/ROOT/project1/subproject/TARGETS",
],
):
targets = builder.compute_targets_to_build(["//project1/..."])
self.assert_targets_equal(
targets,
[
"//project1:a",
"//project1:b",
"//project1/subproject:c",
"//project1/subproject:d",
],
)
with patch.object(
glob, "iglob", return_value=["/ROOT/project1/subproject/TARGETS"]
):
targets = builder.compute_targets_to_build(
["//project1/subproject/..."]
)
self.assert_targets_equal(
targets, ["//project1/subproject:c", "//project1/subproject:d"]
)
with patch.object(glob, "iglob", return_value=["/ROOT/project2/TARGETS"]):
targets = builder.compute_targets_to_build(["//project2/..."])
self.assert_targets_equal(targets, ["//project2:e", "//project1:b"])
def test_compute_targets_to_build_duplicates(self):
# Dependency graph:
# a
# / \
# b-py c-py
# |
# b
build_file = MagicMock()
thrift_target = ThriftLibrary(
"/ROOT", "project", base("b"), ["b.thrift"], False
)
build_file.targets = {
"a": PythonBinary(
"/ROOT",
"project",
base("a", dependencies=["//project:b-py", "//project:c-py"]),
),
"b": thrift_target,
"b-py": thrift_target,
"c-py": ThriftLibrary(
"/ROOT",
"project",
base("c", dependencies=["//project:b"]),
["c.thrift"],
False,
),
}
with patch.object(parser.Parser, "parse_file", return_value=build_file):
builder = FastBuckBuilder("/ROOT")
# b and b-py refer to the same build target; we should only build it once.
targets = builder.compute_targets_to_build(["//project:a"])
self.assert_targets_equal(
targets, ["//project:a", "//project:b", "//project:c"]
)
def test_targets_to_build_wheels(self):
build_file_1 = MagicMock()
build_file_1.targets = {
"a": PythonBinary(
"/ROOT", "project1", base("a", ["//project2/wheel:wheel"])
)
}
build_file_2 = MagicMock()
build_file_2.targets = {
"wheel": PythonWheel("/ROOT", "project2/wheel", base("wheel"), {}, {})
}
build_file_mapping = {"project1": build_file_1, "project2/wheel": build_file_2}
with patch.object(
parser.Parser, "parse_file", side_effect=build_file_mapping.get
):
builder = FastBuckBuilder("/ROOT")
targets = builder.compute_targets_to_build(["//project1:a"])
self.assert_targets_equal(
targets, ["//project1:a", "//project2/wheel:wheel"]
)
targets = builder.compute_targets_to_build(["//project2/wheel:wheel"])
self.assert_targets_equal(targets, ["//project2/wheel:wheel"])
def test_compute_reverse_dependencies(self):
# Dependency graph:
# a
# / \
# b <- c
# | /
# d e
builder = FastBuckBuilder("/ROOT")
a = PythonBinary("/ROOT", "project", base("a", ["//project:b", "//project:c"]))
b = PythonLibrary("/ROOT", "project", base("b", ["//project:d"]))
c = PythonLibrary("/ROOT", "project", base("c", ["//project:b", "//project:d"]))
d = PythonLibrary("/ROOT", "project", base("d"))
e = PythonLibrary("/ROOT", "project", base("e"))
targets = [a, b, c, d, e]
reverse_dependencies = builder.compute_reverse_dependencies(targets)
self.assertDictEqual(
dict(reverse_dependencies),
{"//project:b": [a, c], "//project:c": [a], "//project:d": [b, c]},
)
self.assertEqual(reverse_dependencies["//project:a"], [])
self.assertEqual(reverse_dependencies["//project:e"], [])
def test_normalize_targets(self):
build_file_1 = MagicMock()
build_file_1.targets = {
"a": PythonLibrary("/ROOT", "project1", base("a")),
"b": PythonLibrary("/ROOT", "project1", base("b")),
"c": PythonLibrary("/ROOT", "project1", base("c")),
}
build_file_2 = MagicMock()
build_file_2.targets = {
"d": PythonLibrary("/ROOT", "project1/subproject", base("d")),
"e": PythonLibrary("/ROOT", "project1/subproject", base("e")),
}
build_file_3 = MagicMock()
build_file_3.targets = {"f": PythonLibrary("/ROOT", "project2", base("f"))}
build_file_mapping = {
"project1": build_file_1,
"project1/subproject": build_file_2,
"project2": build_file_3,
}
with patch.object(
parser.Parser, "parse_file", side_effect=build_file_mapping.get
):
builder = FastBuckBuilder("/ROOT")
# Regular targets
normalized_targets = builder._normalize_target("//project1:a")
self.assert_target_names_equal(normalized_targets, ["//project1:a"])
# File wildcard targets
normalized_targets = builder._normalize_target("//project1:")
self.assert_target_names_equal(
normalized_targets, ["//project1:a", "//project1:b", "//project1:c"]
)
normalized_targets = builder._normalize_target("//project1/subproject:")
self.assert_target_names_equal(
normalized_targets,
["//project1/subproject:d", "//project1/subproject:e"],
)
normalized_targets = builder._normalize_target("//project2:")
self.assert_target_names_equal(normalized_targets, ["//project2:f"])
# Directory wildcard targets
with patch.object(
glob,
"iglob",
return_value=[
"/ROOT/project1/TARGETS",
"/ROOT/project1/subproject/TARGETS",
"/ROOT/project2/TARGETS",
],
) as fake_iglob:
normalized_targets = builder._normalize_target("//...")
self.assert_target_names_equal(
normalized_targets,
[
"//project1:a",
"//project1:b",
"//project1:c",
"//project1/subproject:d",
"//project1/subproject:e",
"//project2:f",
],
)
fake_iglob.assert_called_once_with("/ROOT/**/TARGETS", recursive=True)
with patch.object(
glob,
"iglob",
return_value=[
"/ROOT/project1/TARGETS",
"/ROOT/project1/subproject/TARGETS",
],
) as fake_iglob:
normalized_targets = builder._normalize_target("//project1/...")
self.assert_target_names_equal(
normalized_targets,
[
"//project1:a",
"//project1:b",
"//project1:c",
"//project1/subproject:d",
"//project1/subproject:e",
],
)
fake_iglob.assert_called_once_with(
"/ROOT/project1/**/TARGETS", recursive=True
)
with patch.object(
glob, "iglob", return_value=["/ROOT/project1/subproject/TARGETS"]
) as fake_iglob:
normalized_targets = builder._normalize_target(
"//project1/subproject/..."
)
self.assert_target_names_equal(
normalized_targets,
["//project1/subproject:d", "//project1/subproject:e"],
)
fake_iglob.assert_called_once_with(
"/ROOT/project1/subproject/**/TARGETS", recursive=True
)
with patch.object(
glob, "iglob", return_value=["/ROOT/project2/TARGETS"]
) as fake_iglob:
normalized_targets = builder._normalize_target("//project2/...")
self.assert_target_names_equal(normalized_targets, ["//project2:f"])
fake_iglob.assert_called_once_with(
"/ROOT/project2/**/TARGETS", recursive=True
)
def test_build(self):
with patch.object(
FastBuckBuilder, "compute_targets_to_build"
) as compute_targets_to_build:
fake_targets = [MagicMock(), MagicMock(), MagicMock()]
compute_targets_to_build.return_value = fake_targets
builder = FastBuckBuilder("/ROOT", output_directory="/output")
builder.build(["//target:"])
for fake_target in fake_targets:
fake_target.build.assert_called_once_with("/output")
| 37.590038
| 88
| 0.505045
| 1,770
| 19,622
| 5.358192
| 0.075706
| 0.050295
| 0.048714
| 0.066428
| 0.767609
| 0.737453
| 0.702657
| 0.6686
| 0.620413
| 0.569907
| 0
| 0.014753
| 0.350576
| 19,622
| 521
| 89
| 37.662188
| 0.729499
| 0.030017
| 0
| 0.509479
| 0
| 0
| 0.183618
| 0.052064
| 0
| 0
| 0
| 0
| 0.116114
| 1
| 0.030806
| false
| 0
| 0.018957
| 0
| 0.052133
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 2
|
29fdda258cbe5d54b3217108b57775e883bf274f
| 549
|
py
|
Python
|
fishpass/migrations/0004_auto_20180925_1825.py
|
Ecotrust/FishPass
|
a69a4f9de46f28653ae92ef33c1e5cf7036cfb37
|
[
"MIT"
] | 3
|
2019-03-01T04:00:21.000Z
|
2022-02-10T22:17:20.000Z
|
fishpass/migrations/0004_auto_20180925_1825.py
|
Ecotrust/FishPass
|
a69a4f9de46f28653ae92ef33c1e5cf7036cfb37
|
[
"MIT"
] | 165
|
2018-04-13T18:24:39.000Z
|
2022-03-02T03:27:33.000Z
|
fishpass/migrations/0004_auto_20180925_1825.py
|
Ecotrust/FishPass
|
a69a4f9de46f28653ae92ef33c1e5cf7036cfb37
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-26 01:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('fishpass', '0003_auto_20180925_1825'),
]
operations = [
migrations.AlterField(
model_name='project',
name='focus_region',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fishpass.FocusArea'),
),
]
| 24.954545
| 106
| 0.657559
| 63
| 549
| 5.555556
| 0.714286
| 0.068571
| 0.08
| 0.125714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079812
| 0.224044
| 549
| 21
| 107
| 26.142857
| 0.741784
| 0.125683
| 0
| 0
| 1
| 0
| 0.142558
| 0.048218
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.142857
| 0.214286
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 2
|
4b1d49f6efb27ace41851af3af43454783b205c7
| 399
|
py
|
Python
|
solutions/level0_tasks.py
|
sksuzuki/How-to-Learn-to-Code
|
347943dbd2a3d176f3459c2e9f18cba1bdf78597
|
[
"MIT"
] | null | null | null |
solutions/level0_tasks.py
|
sksuzuki/How-to-Learn-to-Code
|
347943dbd2a3d176f3459c2e9f18cba1bdf78597
|
[
"MIT"
] | 3
|
2019-06-06T21:11:41.000Z
|
2019-06-06T21:12:37.000Z
|
solutions/level0_tasks.py
|
sksuzuki/How-to-Learn-to-Code
|
347943dbd2a3d176f3459c2e9f18cba1bdf78597
|
[
"MIT"
] | null | null | null |
import numpy as np
def square(x):
"""Square a number"""
return x ** 2
def volume_converter(volume, unit):
"""Convert certain SI volumes to mLs"""
conversions = {'mL': 1E-3, 'uL': 1E-6, 'nL': 1E-9, 'kL': 1E3}
return round(volume * conversions[unit], 10)
def squared_sum(in_list):
"""Finds the sum of squares of a list of numbers."""
return np.sum(np.array(in_list)**2)
| 26.6
| 65
| 0.629073
| 65
| 399
| 3.8
| 0.646154
| 0.048583
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037855
| 0.205514
| 399
| 14
| 66
| 28.5
| 0.741325
| 0.240602
| 0
| 0
| 0
| 0
| 0.027875
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 2
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.