hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
39f4f90e9b80ade83346acbec06fcedbaeda8cb3
| 88
|
py
|
Python
|
advanced_tools/__init__.py
|
kvdogan/advanced_tools
|
7e93232374980d83fda8051496a190188c11fe0d
|
[
"MIT"
] | null | null | null |
advanced_tools/__init__.py
|
kvdogan/advanced_tools
|
7e93232374980d83fda8051496a190188c11fe0d
|
[
"MIT"
] | null | null | null |
advanced_tools/__init__.py
|
kvdogan/advanced_tools
|
7e93232374980d83fda8051496a190188c11fe0d
|
[
"MIT"
] | null | null | null |
from advanced_tools.IO_path_utils import *
from advanced_tools.algorithm_utils import *
| 29.333333
| 44
| 0.863636
| 13
| 88
| 5.461538
| 0.615385
| 0.338028
| 0.478873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 88
| 2
| 45
| 44
| 0.8875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
84377da9e8bef2666e66841f43d9581ba693e418
| 39,550
|
py
|
Python
|
wicon/glyph.py
|
Wudan07/wIcon
|
9189b7029759a22371827426b5342b6dc976f1b2
|
[
"MIT"
] | null | null | null |
wicon/glyph.py
|
Wudan07/wIcon
|
9189b7029759a22371827426b5342b6dc976f1b2
|
[
"MIT"
] | null | null | null |
wicon/glyph.py
|
Wudan07/wIcon
|
9189b7029759a22371827426b5342b6dc976f1b2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Brad Newbold (wudan07 [at] gmail.com)
# See LICENSE for details.
# glyph.py
#
"""wIcon library:
glyph provides GlyphObject
"""
##from handy import *
##from common import *
### represents a character in a glyphString
class GlyphObject:
def __init__(self, glyph):
### set to glyph value
self.glyph = glyph
### will be an array of pixels, unique to each glyph
self.coords = []
### will be an adjustment to the next characters starting point - I eyeballed this. Sorry typographers!
self.flash = 6
if glyph == 'A':
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 3])
self.coords.append([1, 4])
self.coords.append([1, 5])
self.coords.append([1, 6])
self.coords.append([2, 0])
self.coords.append([2, 1])
self.coords.append([2, 2])
self.coords.append([2, 6])
self.coords.append([3, 0])
self.coords.append([3, 1])
self.coords.append([3, 2])
self.coords.append([3, 6])
self.coords.append([4, 3])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([5, 6])
self.coords.append([5, 7])
self.coords.append([5, 8])
self.flash = 7
elif glyph == 'a':
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([4, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([3, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'B':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 0])
self.coords.append([1, 4])
self.coords.append([1, 8])
self.coords.append([2, 0])
self.coords.append([2, 4])
self.coords.append([2, 8])
self.coords.append([3, 0])
self.coords.append([3, 4])
self.coords.append([3, 8])
self.coords.append([4, 1])
self.coords.append([4, 2])
self.coords.append([4, 3])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([4, 7])
self.flash = 6
elif glyph == 'b':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 4])
self.coords.append([1, 8])
self.coords.append([2, 3])
self.coords.append([2, 8])
self.coords.append([3, 3])
self.coords.append([3, 8])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([4, 7])
self.flash = 6
elif glyph == 'C':
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([1, 1])
self.coords.append([1, 7])
self.coords.append([2, 0])
self.coords.append([2, 8])
self.coords.append([3, 0])
self.coords.append([3, 8])
self.coords.append([4, 0])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'c':
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.flash = 6
elif glyph == 'D':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 0])
self.coords.append([1, 8])
self.coords.append([2, 0])
self.coords.append([2, 8])
self.coords.append([3, 0])
self.coords.append([3, 8])
self.coords.append([4, 1])
self.coords.append([4, 7])
self.coords.append([5, 2])
self.coords.append([5, 3])
self.coords.append([5, 4])
self.coords.append([5, 5])
self.coords.append([5, 6])
self.flash = 7
elif glyph == 'd':
self.coords.append([4, 0])
self.coords.append([4, 1])
self.coords.append([4, 2])
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([4, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([3, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'E':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 0])
self.coords.append([1, 4])
self.coords.append([1, 8])
self.coords.append([2, 0])
self.coords.append([2, 4])
self.coords.append([2, 8])
self.coords.append([3, 0])
self.coords.append([3, 4])
self.coords.append([3, 8])
self.coords.append([4, 0])
self.coords.append([4, 4])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'e':
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([1, 5])
self.coords.append([2, 5])
self.coords.append([3, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.flash = 6
elif glyph == 'F':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 0])
self.coords.append([1, 4])
self.coords.append([2, 0])
self.coords.append([2, 4])
self.coords.append([3, 0])
self.coords.append([3, 4])
self.coords.append([4, 0])
self.coords.append([4, 4])
self.flash = 6
elif glyph == 'f':
self.coords.append([2, 1])
self.coords.append([3, 1])
self.coords.append([1, 2])
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([1, 4])
self.coords.append([1, 5])
self.coords.append([1, 6])
self.coords.append([1, 7])
self.coords.append([1, 8])
self.flash = 5
elif glyph == 'G':
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([1, 1])
self.coords.append([1, 7])
self.coords.append([2, 0])
self.coords.append([2, 8])
self.coords.append([3, 0])
self.coords.append([3, 4])
self.coords.append([3, 8])
self.coords.append([4, 0])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([4, 7])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'g':
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([4, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([3, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([4, 8])
self.coords.append([4, 9])
self.coords.append([1, 10])
self.coords.append([2, 10])
self.coords.append([3, 10])
self.flash = 6
elif glyph == 'H':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 4])
self.coords.append([2, 4])
self.coords.append([3, 4])
self.coords.append([4, 0])
self.coords.append([4, 1])
self.coords.append([4, 2])
self.coords.append([4, 3])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([4, 7])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'h':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([4, 7])
self.coords.append([0, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'I':
self.coords.append([0, 0])
self.coords.append([0, 8])
self.coords.append([1, 0])
self.coords.append([1, 8])
self.coords.append([2, 0])
self.coords.append([2, 1])
self.coords.append([2, 2])
self.coords.append([2, 3])
self.coords.append([2, 4])
self.coords.append([2, 5])
self.coords.append([2, 6])
self.coords.append([2, 7])
self.coords.append([2, 8])
self.coords.append([3, 0])
self.coords.append([3, 8])
self.coords.append([4, 0])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'i':
self.coords.append([1, 1])
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([1, 4])
self.coords.append([1, 5])
self.coords.append([1, 6])
self.coords.append([1, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.flash = 4
elif glyph == 'J':
self.coords.append([0, 8])
self.coords.append([1, 8])
self.coords.append([2, 0])
self.coords.append([2, 8])
self.coords.append([3, 0])
self.coords.append([3, 8])
self.coords.append([4, 0])
self.coords.append([4, 1])
self.coords.append([4, 2])
self.coords.append([4, 3])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([4, 7])
self.flash = 6
elif glyph == 'j':
self.coords.append([2, 1])
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([2, 4])
self.coords.append([2, 5])
self.coords.append([2, 6])
self.coords.append([2, 7])
self.coords.append([2, 8])
self.coords.append([0, 9])
self.coords.append([1, 9])
self.flash = 4
elif glyph == 'K':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 4])
self.coords.append([2, 3])
self.coords.append([2, 5])
self.coords.append([3, 1])
self.coords.append([3, 2])
self.coords.append([3, 6])
self.coords.append([4, 0])
self.coords.append([4, 7])
self.coords.append([5, 8])
self.flash = 7
elif glyph == 'k':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([2, 4])
self.coords.append([0, 5])
self.coords.append([1, 5])
self.coords.append([0, 6])
self.coords.append([2, 6])
self.coords.append([0, 7])
self.coords.append([3, 7])
self.coords.append([0, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'L':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'l':
self.coords.append([1, 1])
self.coords.append([1, 2])
self.coords.append([1, 3])
self.coords.append([1, 4])
self.coords.append([1, 5])
self.coords.append([1, 6])
self.coords.append([1, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.flash = 4
elif glyph == 'M':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 1])
self.coords.append([1, 2])
self.coords.append([1, 3])
self.coords.append([2, 4])
self.coords.append([2, 5])
self.coords.append([2, 6])
self.coords.append([3, 1])
self.coords.append([3, 2])
self.coords.append([3, 3])
self.coords.append([4, 0])
self.coords.append([4, 1])
self.coords.append([4, 2])
self.coords.append([4, 3])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([4, 7])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'm':
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([2, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([2, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([2, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([2, 7])
self.coords.append([4, 7])
self.coords.append([0, 8])
self.coords.append([2, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'N':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 1])
self.coords.append([1, 2])
self.coords.append([2, 3])
self.coords.append([2, 4])
self.coords.append([2, 5])
self.coords.append([3, 6])
self.coords.append([3, 7])
self.coords.append([4, 0])
self.coords.append([4, 1])
self.coords.append([4, 2])
self.coords.append([4, 3])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([4, 7])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'n':
self.coords.append([0, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([1, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([4, 7])
self.coords.append([0, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'O':
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([1, 1])
self.coords.append([1, 7])
self.coords.append([2, 0])
self.coords.append([2, 8])
self.coords.append([3, 0])
self.coords.append([3, 8])
self.coords.append([4, 1])
self.coords.append([4, 7])
self.coords.append([5, 2])
self.coords.append([5, 3])
self.coords.append([5, 4])
self.coords.append([5, 5])
self.coords.append([5, 6])
self.flash = 7
elif glyph == 'o':
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.flash = 6
elif glyph == 'P':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 0])
self.coords.append([1, 5])
self.coords.append([2, 0])
self.coords.append([2, 5])
self.coords.append([3, 0])
self.coords.append([3, 4])
self.coords.append([4, 1])
self.coords.append([4, 2])
self.coords.append([4, 3])
self.flash = 6
elif glyph == 'p':
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([4, 7])
self.coords.append([0, 8])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.coords.append([0, 9])
self.coords.append([0, 10])
self.flash = 6
elif glyph == 'Q':
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([1, 1])
self.coords.append([1, 7])
self.coords.append([2, 0])
self.coords.append([2, 8])
self.coords.append([3, 0])
self.coords.append([3, 8])
self.coords.append([3, 9])
self.coords.append([4, 1])
self.coords.append([4, 7])
self.coords.append([4, 10])
self.coords.append([5, 2])
self.coords.append([5, 3])
self.coords.append([5, 4])
self.coords.append([5, 5])
self.coords.append([5, 6])
self.coords.append([5, 10])
self.coords.append([6, 10])
self.flash = 7
elif glyph == 'q':
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([4, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([3, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([4, 8])
self.coords.append([4, 9])
self.coords.append([4, 10])
self.flash = 6
elif glyph == 'R':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 0])
self.coords.append([1, 4])
self.coords.append([2, 0])
self.coords.append([2, 4])
self.coords.append([3, 0])
self.coords.append([3, 3])
self.coords.append([3, 5])
self.coords.append([3, 6])
self.coords.append([4, 1])
self.coords.append([4, 2])
self.coords.append([4, 7])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'r':
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([3, 3])
self.coords.append([4, 3])
self.coords.append([1, 4])
self.coords.append([2, 4])
self.coords.append([1, 5])
self.coords.append([1, 6])
self.coords.append([1, 7])
self.coords.append([0, 8])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.flash = 6
elif glyph == 'S':
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 8])
self.coords.append([1, 0])
self.coords.append([1, 4])
self.coords.append([1, 8])
self.coords.append([2, 0])
self.coords.append([2, 4])
self.coords.append([2, 8])
self.coords.append([3, 0])
self.coords.append([3, 5])
self.coords.append([3, 8])
self.coords.append([4, 6])
self.coords.append([4, 7])
self.flash = 6
elif glyph == 's':
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([1, 5])
self.coords.append([2, 5])
self.coords.append([3, 6])
self.coords.append([0, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.flash = 6
elif glyph == 'T':
self.coords.append([0, 0])
self.coords.append([1, 0])
self.coords.append([2, 0])
self.coords.append([3, 0])
self.coords.append([3, 1])
self.coords.append([3, 2])
self.coords.append([3, 3])
self.coords.append([3, 4])
self.coords.append([3, 5])
self.coords.append([3, 6])
self.coords.append([3, 7])
self.coords.append([3, 8])
self.coords.append([4, 0])
self.coords.append([5, 0])
self.coords.append([6, 0])
self.flash = 8
elif glyph == 't':
self.coords.append([1, 1])
self.coords.append([1, 2])
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([1, 4])
self.coords.append([1, 5])
self.coords.append([1, 6])
self.coords.append([1, 7])
self.coords.append([4, 7])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.flash = 6
elif glyph == 'U':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.coords.append([4, 0])
self.coords.append([4, 1])
self.coords.append([4, 2])
self.coords.append([4, 3])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([4, 7])
self.flash = 6
elif glyph == 'u':
self.coords.append([0, 3])
self.coords.append([4, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([3, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'V':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([1, 2])
self.coords.append([1, 3])
self.coords.append([1, 4])
self.coords.append([2, 5])
self.coords.append([2, 6])
self.coords.append([3, 7])
self.coords.append([3, 8])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([5, 1])
self.coords.append([5, 2])
self.coords.append([5, 3])
self.coords.append([6, 0])
self.flash = 8
elif glyph == 'v':
self.coords.append([0, 3])
self.coords.append([4, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([1, 5])
self.coords.append([3, 5])
self.coords.append([1, 6])
self.coords.append([3, 6])
self.coords.append([2, 7])
self.coords.append([2, 8])
self.flash = 6
elif glyph == 'W':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([1, 5])
self.coords.append([1, 6])
self.coords.append([1, 7])
self.coords.append([1, 8])
self.coords.append([2, 3])
self.coords.append([2, 4])
self.coords.append([2, 5])
self.coords.append([3, 0])
self.coords.append([3, 1])
self.coords.append([3, 2])
self.coords.append([4, 3])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([5, 5])
self.coords.append([5, 6])
self.coords.append([5, 7])
self.coords.append([5, 8])
self.coords.append([6, 0])
self.coords.append([6, 1])
self.coords.append([6, 2])
self.coords.append([6, 3])
self.coords.append([6, 4])
self.flash = 8
elif glyph == 'w':
self.coords.append([0, 3])
self.coords.append([4, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([2, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([2, 6])
self.coords.append([4, 6])
self.coords.append([1, 7])
self.coords.append([3, 7])
self.coords.append([1, 8])
self.coords.append([3, 8])
self.flash = 6
elif glyph == 'X':
self.coords.append([0, 0])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 1])
self.coords.append([1, 2])
self.coords.append([1, 5])
self.coords.append([1, 6])
self.coords.append([2, 3])
self.coords.append([2, 4])
self.coords.append([3, 1])
self.coords.append([3, 2])
self.coords.append([3, 5])
self.coords.append([3, 6])
self.coords.append([4, 0])
self.coords.append([4, 7])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'x':
self.coords.append([0, 3])
self.coords.append([4, 3])
self.coords.append([1, 4])
self.coords.append([3, 4])
self.coords.append([2, 5])
self.coords.append([2, 6])
self.coords.append([1, 7])
self.coords.append([3, 7])
self.coords.append([0, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'Y':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([1, 2])
self.coords.append([1, 3])
self.coords.append([2, 4])
self.coords.append([2, 5])
self.coords.append([2, 6])
self.coords.append([2, 7])
self.coords.append([2, 8])
self.coords.append([3, 2])
self.coords.append([3, 3])
self.coords.append([4, 0])
self.coords.append([4, 1])
self.flash = 6
elif glyph == 'y':
self.coords.append([0, 3])
self.coords.append([4, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([1, 5])
self.coords.append([3, 5])
self.coords.append([1, 6])
self.coords.append([3, 6])
self.coords.append([2, 7])
self.coords.append([2, 8])
self.coords.append([1, 9])
self.coords.append([0, 10])
self.flash = 6
elif glyph == 'Z':
self.coords.append([0, 0])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 0])
self.coords.append([1, 5])
self.coords.append([1, 6])
self.coords.append([1, 8])
self.coords.append([2, 0])
self.coords.append([2, 4])
self.coords.append([2, 8])
self.coords.append([3, 0])
self.coords.append([3, 2])
self.coords.append([3, 3])
self.coords.append([3, 8])
self.coords.append([4, 0])
self.coords.append([4, 1])
self.coords.append([4, 8])
self.flash = 6
elif glyph == 'z':
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([4, 3])
self.coords.append([3, 4])
self.coords.append([2, 5])
self.coords.append([1, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == '0':
self.coords.append([1, 0])
self.coords.append([2, 0])
self.coords.append([3, 0])
self.coords.append([0, 1])
self.coords.append([4, 1])
self.coords.append([0, 2])
self.coords.append([4, 2])
self.coords.append([0, 3])
self.coords.append([3, 3])
self.coords.append([4, 3])
self.coords.append([0, 4])
self.coords.append([2, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([1, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.flash = 6
elif glyph == '1':
self.coords.append([2, 0])
self.coords.append([1, 1])
self.coords.append([2, 1])
self.coords.append([0, 2])
self.coords.append([2, 2])
self.coords.append([2, 3])
self.coords.append([2, 4])
self.coords.append([2, 5])
self.coords.append([2, 6])
self.coords.append([2, 7])
self.coords.append([0, 8])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == '2':
self.coords.append([1, 0])
self.coords.append([2, 0])
self.coords.append([3, 0])
self.coords.append([0, 1])
self.coords.append([4, 1])
self.coords.append([4, 2])
self.coords.append([4, 3])
self.coords.append([3, 4])
self.coords.append([2, 5])
self.coords.append([1, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.coords.append([4, 8])
self.flash = 6
elif glyph == '3':
self.coords.append([1, 0])
self.coords.append([2, 0])
self.coords.append([3, 0])
self.coords.append([0, 1])
self.coords.append([4, 1])
self.coords.append([4, 2])
self.coords.append([4, 3])
self.coords.append([2, 4])
self.coords.append([3, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.flash = 6
elif glyph == '4':
self.coords.append([1, 0])
self.coords.append([3, 0])
self.coords.append([1, 1])
self.coords.append([3, 1])
self.coords.append([0, 2])
self.coords.append([3, 2])
self.coords.append([0, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([1, 4])
self.coords.append([2, 4])
self.coords.append([3, 4])
self.coords.append([4, 4])
self.coords.append([3, 5])
self.coords.append([3, 6])
self.coords.append([3, 7])
self.coords.append([3, 8])
self.flash = 6
elif glyph == '5':
self.coords.append([0, 0])
self.coords.append([1, 0])
self.coords.append([2, 0])
self.coords.append([3, 0])
self.coords.append([4, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([4, 7])
self.coords.append([0, 8])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.flash = 6
elif glyph == '6':
self.coords.append([2, 0])
self.coords.append([3, 0])
self.coords.append([1, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.flash = 6
elif glyph == '7':
self.coords.append([0, 0])
self.coords.append([1, 0])
self.coords.append([2, 0])
self.coords.append([3, 0])
self.coords.append([4, 0])
self.coords.append([5, 0])
self.coords.append([0, 1])
self.coords.append([5, 1])
self.coords.append([5, 2])
self.coords.append([4, 3])
self.coords.append([4, 4])
self.coords.append([3, 5])
self.coords.append([3, 6])
self.coords.append([2, 7])
self.coords.append([2, 8])
self.flash = 7
elif glyph == '8':
self.coords.append([1, 0])
self.coords.append([2, 0])
self.coords.append([3, 0])
self.coords.append([0, 1])
self.coords.append([4, 1])
self.coords.append([0, 2])
self.coords.append([4, 2])
self.coords.append([1, 3])
self.coords.append([2, 3])
self.coords.append([3, 3])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([0, 5])
self.coords.append([4, 5])
self.coords.append([0, 6])
self.coords.append([4, 6])
self.coords.append([0, 7])
self.coords.append([4, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.flash = 6
elif glyph == '9':
self.coords.append([1, 0])
self.coords.append([2, 0])
self.coords.append([3, 0])
self.coords.append([0, 1])
self.coords.append([4, 1])
self.coords.append([0, 2])
self.coords.append([4, 2])
self.coords.append([0, 3])
self.coords.append([4, 3])
self.coords.append([1, 4])
self.coords.append([2, 4])
self.coords.append([3, 4])
self.coords.append([4, 4])
self.coords.append([0, 4])
self.coords.append([4, 4])
self.coords.append([4, 5])
self.coords.append([4, 6])
self.coords.append([3, 7])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.flash = 6
elif glyph == '-':
self.coords.append([0, 4])
self.coords.append([1, 4])
self.coords.append([2, 4])
self.coords.append([3, 4])
self.flash = 6
elif glyph == '.':
self.coords.append([0, 7])
self.coords.append([1, 7])
self.coords.append([0, 8])
self.coords.append([1, 8])
self.flash = 4
elif glyph == '!':
self.coords.append([0, 0])
self.coords.append([1, 0])
self.coords.append([0, 1])
self.coords.append([1, 1])
self.coords.append([0, 2])
self.coords.append([1, 2])
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([0, 4])
self.coords.append([1, 4])
self.coords.append([0, 5])
self.coords.append([1, 5])
self.coords.append([0, 7])
self.coords.append([1, 7])
self.coords.append([0, 8])
self.coords.append([1, 8])
self.flash = 4
elif glyph == ',':
self.coords.append([0, 7])
self.coords.append([1, 7])
self.coords.append([0, 8])
self.coords.append([1, 8])
self.coords.append([1, 9])
self.coords.append([0, 10])
self.flash = 4
elif glyph == '\'':
self.coords.append([0, 0])
self.coords.append([1, 0])
self.coords.append([0, 1])
self.coords.append([1, 1])
self.coords.append([1, 2])
self.flash = 4
elif glyph == '"':
self.coords.append([0, 0])
self.coords.append([0, 1])
self.coords.append([0, 2])
self.coords.append([2, 0])
self.coords.append([2, 1])
self.coords.append([2, 2])
self.flash = 4
elif glyph == ' ':
self.flash = 6
elif glyph == '\t':
self.flash = 24
elif glyph == '(':
self.coords.append([2, 0])
self.coords.append([1, 1])
self.coords.append([0, 2])
self.coords.append([0, 3])
self.coords.append([0, 4])
self.coords.append([0, 5])
self.coords.append([0, 6])
self.coords.append([0, 7])
self.coords.append([0, 8])
self.coords.append([1, 9])
self.coords.append([2, 10])
self.flash = 6
elif glyph == ')':
self.coords.append([0, 0])
self.coords.append([1, 1])
self.coords.append([2, 2])
self.coords.append([2, 3])
self.coords.append([2, 4])
self.coords.append([2, 5])
self.coords.append([2, 6])
self.coords.append([2, 7])
self.coords.append([2, 8])
self.coords.append([1, 9])
self.coords.append([0, 10])
self.flash = 6
elif glyph == ')':
self.coords.append([0, 0])
self.coords.append([1, 1])
self.coords.append([2, 2])
self.coords.append([2, 3])
self.coords.append([2, 4])
self.coords.append([2, 5])
self.coords.append([2, 6])
self.coords.append([2, 7])
self.coords.append([2, 8])
self.coords.append([1, 9])
self.coords.append([0, 10])
self.flash = 6
elif glyph == ':':
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([0, 4])
self.coords.append([1, 4])
self.coords.append([0, 7])
self.coords.append([1, 7])
self.coords.append([0, 8])
self.coords.append([1, 8])
self.flash = 5
elif glyph == ';':
self.coords.append([0, 3])
self.coords.append([1, 3])
self.coords.append([0, 4])
self.coords.append([1, 4])
self.coords.append([0, 7])
self.coords.append([1, 7])
self.coords.append([1, 8])
self.coords.append([0, 9])
self.flash = 5
elif glyph == '_':
self.coords.append([0, 8])
self.coords.append([1, 8])
self.coords.append([2, 8])
self.coords.append([3, 8])
self.coords.append([4, 8])
self.coords.append([5, 8])
self.flash = 7
else:
self.flash = 6
def center(self, wide=6):
    """Center this glyph inside a cell of width *wide*.

    Shifts every coordinate right by half of the leftover horizontal
    space and sets the advance (``flash``) to the cell width plus the
    2-pixel inter-glyph gap (via ``_flash``).

    :param wide: target cell width in pixels

    Fix: use floor division so the shift stays an integer on Python 3
    as well; on Python 2 `/` on ints already floored, so behavior is
    unchanged there.
    """
    glyph_wide = self.flash - 2  # drawn width excludes the 2px gap
    adjust = (wide - glyph_wide) // 2
    for cor in self.coords:
        cor[0] += adjust
    self._flash(wide + 2)
def _flash(self, flash):
self.flash = flash
def glyphstr_length(gls):
    """Return the rendered pixel length of the glyphstr *gls*.

    Sums every glyph's advance (``flash``) and drops the trailing
    2-pixel inter-glyph gap after the last glyph.
    """
    return sum(gl.flash for gl in gls) - 2
def glyphstr_monospace(gls, wide=6):
    """Give every glyph in *gls* the same advance by centering each
    one inside a fixed cell of width *wide* (delegates to
    ``GlyphObject.center``).
    """
    for glyph in gls:
        glyph.center(wide)
def glyphstr_center(gls, width=100):
    """Center the glyphstr *gls* inside an area *width* pixels wide
    (such as a column heading) by padding the glyphs' advances.

    :param gls: list of glyph objects (as returned by glyphstr_get)
    :param width: target area width in pixels

    NOTE(review): the pad is added to EVERY glyph's advance, not just
    the first, so the total string length grows by ``len(gls) * pad``;
    this matches the original behavior -- confirm against the renderer
    before changing it.

    Fixes: removed leftover Python-2-only debug ``print`` statements
    (SyntaxError on Python 3, polluted stdout) and dead commented-out
    code; floor division keeps ``flash`` integral on Python 3
    (identical to Py2 int `/`).
    """
    pad = (width - glyphstr_length(gls)) // 2
    for gl in gls:
        gl._flash(gl.flash + pad)
def glyphstr_justify(gls, width=100):
    """Stretch the glyphstr *gls* to fill an area *width* pixels wide
    (such as a column heading).

    Distributes the leftover space one pixel at a time across the
    glyphs' advances (``flash``), round-robin from the left.  With more
    than one glyph the last glyph is never padded, so no trailing space
    appears after the string.

    :param gls: list of glyph objects (as returned by glyphstr_get)
    :param width: target area width in pixels

    Fixes: removed leftover Python-2-only debug ``print`` statements
    (SyntaxError on Python 3, polluted stdout) and dead commented-out
    code; an empty *gls* is now a no-op instead of raising IndexError.
    """
    if not gls:
        return
    extra = width - glyphstr_length(gls)
    glen = len(gls)
    ct = 0
    for _ in range(extra):
        if ct >= glen - 1:
            ct = 0
        gl = gls[ct]
        gl._flash(gl.flash + 1)
        ct += 1
def glyphstr_bounds_get(string, mono=False):
    """Return ``[width, height]`` of *string* rendered as glyphstrs.

    Height is 10 pixels per line; width is the length of the longest
    line (never less than 0).

    :param string: text, possibly multi-line (newline separated)
    :param mono: when True, monospace each line before measuring
    """
    lines = string.split('\n')
    widest = 0
    for line in lines:
        glyphs = glyphstr_get(line)
        if mono:
            glyphstr_monospace(glyphs)
        widest = max(widest, glyphstr_length(glyphs))
    return [widest, len(lines) * 10]
def glyphstr_get(string):
    """Return *string* converted to a list of GlyphObject instances,
    one per character, in order.
    """
    return [GlyphObject(ch) for ch in string]
| 26.759134
| 143
| 0.596207
| 6,505
| 39,550
| 3.621214
| 0.023367
| 0.482255
| 0.77025
| 0.241764
| 0.946808
| 0.942265
| 0.936619
| 0.934199
| 0.932841
| 0.92295
| 0
| 0.074452
| 0.181542
| 39,550
| 1,477
| 144
| 26.777251
| 0.653259
| 0.01115
| 0
| 0.900888
| 0
| 0
| 0.002051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.002959
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
ffc234c8fa1382a81cd3f2b1ea5e202da915c840
| 28,482
|
py
|
Python
|
swagger_client/models/client_configuration.py
|
chbndrhnns/finapi-client
|
259beda8b05e912c49d2dc4c3ed71205134e5d8a
|
[
"MIT"
] | 2
|
2019-04-15T05:58:21.000Z
|
2021-11-15T18:26:37.000Z
|
swagger_client/models/client_configuration.py
|
chbndrhnns/finapi-client
|
259beda8b05e912c49d2dc4c3ed71205134e5d8a
|
[
"MIT"
] | 1
|
2021-06-18T09:46:25.000Z
|
2021-06-18T20:12:41.000Z
|
swagger_client/models/client_configuration.py
|
chbndrhnns/finapi-client
|
259beda8b05e912c49d2dc4c3ed71205134e5d8a
|
[
"MIT"
] | 2
|
2019-07-08T13:41:09.000Z
|
2020-12-07T12:10:04.000Z
|
# coding: utf-8
"""
finAPI RESTful Services
finAPI RESTful Services # noqa: E501
OpenAPI spec version: v1.42.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ClientConfiguration(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
# Maps each model attribute name to its declared Swagger type.
swagger_types = {
    'is_automatic_batch_update_enabled': 'bool',
    'user_notification_callback_url': 'str',
    'user_synchronization_callback_url': 'str',
    'refresh_tokens_validity_period': 'int',
    'user_access_tokens_validity_period': 'int',
    'client_access_tokens_validity_period': 'int',
    'max_user_login_attempts': 'int',
    'is_user_auto_verification_enabled': 'bool',
    'is_mandator_admin': 'bool',
    'is_web_scraping_enabled': 'bool',
    'available_bank_groups': 'list[str]'
}
# Maps each snake_case attribute name to its camelCase JSON key in the
# API definition.
attribute_map = {
    'is_automatic_batch_update_enabled': 'isAutomaticBatchUpdateEnabled',
    'user_notification_callback_url': 'userNotificationCallbackUrl',
    'user_synchronization_callback_url': 'userSynchronizationCallbackUrl',
    'refresh_tokens_validity_period': 'refreshTokensValidityPeriod',
    'user_access_tokens_validity_period': 'userAccessTokensValidityPeriod',
    'client_access_tokens_validity_period': 'clientAccessTokensValidityPeriod',
    'max_user_login_attempts': 'maxUserLoginAttempts',
    'is_user_auto_verification_enabled': 'isUserAutoVerificationEnabled',
    'is_mandator_admin': 'isMandatorAdmin',
    'is_web_scraping_enabled': 'isWebScrapingEnabled',
    'available_bank_groups': 'availableBankGroups'
}
def __init__(self, is_automatic_batch_update_enabled=False, user_notification_callback_url=None, user_synchronization_callback_url=None, refresh_tokens_validity_period=None, user_access_tokens_validity_period=None, client_access_tokens_validity_period=None, max_user_login_attempts=None, is_user_auto_verification_enabled=False, is_mandator_admin=False, is_web_scraping_enabled=False, available_bank_groups=None):  # noqa: E501
    """ClientConfiguration - a model defined in Swagger"""  # noqa: E501
    # Start every backing field declared in swagger_types out as None.
    for attr in self.swagger_types:
        setattr(self, '_' + attr, None)
    self.discriminator = None
    # Required flag -- its property setter rejects None.
    self.is_automatic_batch_update_enabled = is_automatic_batch_update_enabled
    # Optional fields are assigned only when a value was supplied, so
    # their backing fields otherwise stay None.
    _optional = (
        ('user_notification_callback_url', user_notification_callback_url),
        ('user_synchronization_callback_url', user_synchronization_callback_url),
        ('refresh_tokens_validity_period', refresh_tokens_validity_period),
        ('user_access_tokens_validity_period', user_access_tokens_validity_period),
        ('client_access_tokens_validity_period', client_access_tokens_validity_period),
    )
    for attr, value in _optional:
        if value is not None:
            setattr(self, attr, value)
    # Remaining fields are assigned unconditionally; the validating
    # setters among them reject None.
    self.max_user_login_attempts = max_user_login_attempts
    self.is_user_auto_verification_enabled = is_user_auto_verification_enabled
    self.is_mandator_admin = is_mandator_admin
    self.is_web_scraping_enabled = is_web_scraping_enabled
    self.available_bank_groups = available_bank_groups
@property
def is_automatic_batch_update_enabled(self):
    """bool: Whether finAPI performs a regular automatic update of the
    users' bank connections for this client.  Even when enabled,
    individual users can still disable it for their own connections.
    """
    return self._is_automatic_batch_update_enabled

@is_automatic_batch_update_enabled.setter
def is_automatic_batch_update_enabled(self, is_automatic_batch_update_enabled):
    """Set the automatic-batch-update flag (required field).

    :raises ValueError: if the given value is None
    """
    if is_automatic_batch_update_enabled is None:
        raise ValueError("Invalid value for `is_automatic_batch_update_enabled`, must not be `None`")  # noqa: E501
    self._is_automatic_batch_update_enabled = is_automatic_batch_update_enabled
@property
def user_notification_callback_url(self):
    """str: Callback URL that finAPI POSTs batch-update notification
    messages to.  Only relevant when the automatic batch update is
    enabled; must be HTTPS on the live system.
    """
    return self._user_notification_callback_url

@user_notification_callback_url.setter
def user_notification_callback_url(self, user_notification_callback_url):
    """Set the notification callback URL (optional, may be None)."""
    self._user_notification_callback_url = user_notification_callback_url
@property
def user_synchronization_callback_url(self):
    """str: Callback URL that finAPI POSTs user-deletion events to so
    multiple clients sharing one user base stay consistent; the body is
    ``{"userId": ..., "event": "DELETED"}``.  May be None when only one
    client exists; must be HTTPS on the live system.
    """
    return self._user_synchronization_callback_url

@user_synchronization_callback_url.setter
def user_synchronization_callback_url(self, user_synchronization_callback_url):
    """Set the user synchronization callback URL (optional, may be None)."""
    self._user_synchronization_callback_url = user_synchronization_callback_url
@property
def refresh_tokens_validity_period(self):
    """int: Initial validity period (seconds) of newly requested
    refresh tokens; 0 means they never expire unless explicitly
    invalidated (revocation, user lock, or password reset).
    """
    return self._refresh_tokens_validity_period

@refresh_tokens_validity_period.setter
def refresh_tokens_validity_period(self, refresh_tokens_validity_period):
    """Set the refresh-token validity period in seconds (optional,
    may be None).
    """
    self._refresh_tokens_validity_period = refresh_tokens_validity_period
@property
def user_access_tokens_validity_period(self):
    """int: Initial validity period (seconds) of newly requested user
    access tokens; 0 means they never expire unless explicitly
    invalidated (revocation, user lock, or password reset).
    """
    return self._user_access_tokens_validity_period

@user_access_tokens_validity_period.setter
def user_access_tokens_validity_period(self, user_access_tokens_validity_period):
    """Set the user access-token validity period in seconds (optional,
    may be None).
    """
    self._user_access_tokens_validity_period = user_access_tokens_validity_period
@property
def client_access_tokens_validity_period(self):
    """int: Initial validity period (seconds) of newly requested client
    access tokens; 0 means they never expire unless explicitly
    invalidated (e.g. by revocation).
    """
    return self._client_access_tokens_validity_period

@client_access_tokens_validity_period.setter
def client_access_tokens_validity_period(self, client_access_tokens_validity_period):
    """Set the client access-token validity period in seconds
    (optional, may be None).
    """
    self._client_access_tokens_validity_period = client_access_tokens_validity_period
@property
def max_user_login_attempts(self):
    """int: Number of consecutive failed user login attempts allowed
    before finAPI locks the user's account (all tokens invalidated;
    unlock requires a password change).  0 means no limit -- accounts
    are never locked.
    """
    return self._max_user_login_attempts

@max_user_login_attempts.setter
def max_user_login_attempts(self, max_user_login_attempts):
    """Set the failed-login-attempts limit (required field).

    :raises ValueError: if the given value is None
    """
    if max_user_login_attempts is None:
        raise ValueError("Invalid value for `max_user_login_attempts`, must not be `None`")  # noqa: E501
    self._max_user_login_attempts = max_user_login_attempts
@property
def is_user_auto_verification_enabled(self):
    """bool: Whether users created with this client are automatically
    verified on creation.  When False, each new user must first be
    verified (via the "Verify a user" service) before authorization.
    """
    return self._is_user_auto_verification_enabled

@is_user_auto_verification_enabled.setter
def is_user_auto_verification_enabled(self, is_user_auto_verification_enabled):
    """Set the user auto-verification flag (required field).

    :raises ValueError: if the given value is None
    """
    if is_user_auto_verification_enabled is None:
        raise ValueError("Invalid value for `is_user_auto_verification_enabled`, must not be `None`")  # noqa: E501
    self._is_user_auto_verification_enabled = is_user_auto_verification_enabled
@property
def is_mandator_admin(self):
    """bool: Whether this client is a 'Mandator Admin' -- a special
    client that can access finAPI's 'Mandator Administration' section.
    """
    return self._is_mandator_admin

@is_mandator_admin.setter
def is_mandator_admin(self, is_mandator_admin):
    """Set the 'Mandator Admin' flag (required field).

    :raises ValueError: if the given value is None
    """
    if is_mandator_admin is None:
        raise ValueError("Invalid value for `is_mandator_admin`, must not be `None`")  # noqa: E501
    self._is_mandator_admin = is_mandator_admin
@property
def is_web_scraping_enabled(self):
    """Get the is_web_scraping_enabled of this ClientConfiguration.  # noqa: E501

    True when finAPI may use web scrapers for data download in addition to
    (or instead of) the FinTS interface; when False, data is downloaded
    only from FinTS servers. Contact your Sys-Admin to change the setting.  # noqa: E501

    :return: The is_web_scraping_enabled of this ClientConfiguration.  # noqa: E501
    :rtype: bool
    """
    return self._is_web_scraping_enabled
@is_web_scraping_enabled.setter
def is_web_scraping_enabled(self, is_web_scraping_enabled):
    """Set the is_web_scraping_enabled of this ClientConfiguration.

    Whether finAPI is allowed to use web scrapers for data download; when
    disabled, only FinTS servers are used as data sources.  # noqa: E501

    :param is_web_scraping_enabled: The is_web_scraping_enabled of this ClientConfiguration.  # noqa: E501
    :type: bool
    """
    value = is_web_scraping_enabled
    # Required field — reject explicit None assignments.
    if value is None:
        raise ValueError("Invalid value for `is_web_scraping_enabled`, must not be `None`")  # noqa: E501
    self._is_web_scraping_enabled = value
@property
def available_bank_groups(self):
    """Get the available_bank_groups of this ClientConfiguration.  # noqa: E501

    Bank groups available to this client, each identified by a country's
    ISO 3166 ALPHA-2 code. International institutes are always available;
    an empty list means ONLY international institutes are available.  # noqa: E501

    :return: The available_bank_groups of this ClientConfiguration.  # noqa: E501
    :rtype: list[str]
    """
    return self._available_bank_groups
@available_bank_groups.setter
def available_bank_groups(self, available_bank_groups):
    """Set the available_bank_groups of this ClientConfiguration.

    List of ISO 3166 ALPHA-2 country codes naming the bank groups available
    to this client (international institutes are always available).  # noqa: E501

    :param available_bank_groups: The available_bank_groups of this ClientConfiguration.  # noqa: E501
    :type: list[str]
    """
    value = available_bank_groups
    # Required field — an empty list is valid, None is not.
    if value is None:
        raise ValueError("Invalid value for `available_bank_groups`, must not be `None`")  # noqa: E501
    self._available_bank_groups = value
def to_dict(self):
    """Returns the model properties as a dict"""
    def _serialize(obj):
        # Nested swagger models expose to_dict(); plain values pass through.
        return obj.to_dict() if hasattr(obj, "to_dict") else obj

    result = {}
    # swagger_types maps attribute name -> declared type for every property.
    for attr, _ in six.iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [_serialize(item) for item in value]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = {
                key: _serialize(val) for key, val in value.items()
            }
        else:
            result[attr] = value
    return result
def to_str(self):
    """Returns the model properties pretty-printed as a string."""
    model_dict = self.to_dict()
    return pprint.pformat(model_dict)
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
    """Returns true if both objects are equal"""
    # Only another ClientConfiguration can compare equal; equality is
    # attribute-by-attribute via the instance dicts.
    if isinstance(other, ClientConfiguration):
        return self.__dict__ == other.__dict__
    return False
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 71.027431
| 1,696
| 0.743066
| 4,091
| 28,482
| 4.988511
| 0.099487
| 0.025088
| 0.05586
| 0.048412
| 0.88235
| 0.836682
| 0.798216
| 0.755096
| 0.732213
| 0.66273
| 0
| 0.010876
| 0.202654
| 28,482
| 400
| 1,697
| 71.205
| 0.887759
| 0.636332
| 0
| 0.077844
| 1
| 0
| 0.148253
| 0.103057
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167665
| false
| 0
| 0.017964
| 0
| 0.305389
| 0.011976
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ffe516953bedc8e02aa7624b4a14d347ba8dad15
| 52,397
|
py
|
Python
|
ambulance/tests/test_calls.py
|
aschrist/WebServerAndClient
|
3aa0af2c444acac88a1b51b4cfd4bb8d0c36e640
|
[
"BSD-3-Clause"
] | null | null | null |
ambulance/tests/test_calls.py
|
aschrist/WebServerAndClient
|
3aa0af2c444acac88a1b51b4cfd4bb8d0c36e640
|
[
"BSD-3-Clause"
] | null | null | null |
ambulance/tests/test_calls.py
|
aschrist/WebServerAndClient
|
3aa0af2c444acac88a1b51b4cfd4bb8d0c36e640
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
import time
from django.test import Client
from django.conf import settings
from django.urls import reverse
from django.db import IntegrityError
from django.utils import timezone
from rest_framework.parsers import JSONParser
from rest_framework import serializers
from io import BytesIO
import json
from ambulance.models import Call, Patient, AmbulanceCall, CallStatus, CallPriority, \
AmbulanceUpdate, AmbulanceStatus, Waypoint, Location, LocationType, WaypointStatus, AmbulanceCallStatus
from ambulance.serializers import CallSerializer, AmbulanceCallSerializer, PatientSerializer, \
AmbulanceUpdateSerializer, WaypointSerializer, LocationSerializer
from emstrack.tests.util import date2iso, point2str
from login.tests.setup_data import TestSetup
logger = logging.getLogger(__name__)
class TestCall(TestSetup):
def test_patient_serializer(self):
    """Round-trip Patient instances through PatientSerializer."""
    # A patient must be attached to a call.
    call = Call.objects.create(updated_by=self.u1)

    # Serializing a bare patient yields an empty name and no age.
    patient = Patient.objects.create(call=call)
    expected = {
        'id': patient.id,
        'name': '',
        'age': None
    }
    self.assertDictEqual(PatientSerializer(patient).data, expected)

    # Deserializing name and age creates a matching record.
    serializer = PatientSerializer(data={'name': 'Jose', 'age': 3})
    self.assertTrue(serializer.is_valid())
    serializer.save(call_id=call.id)
    patient = Patient.objects.get(name='Jose')
    expected = {
        'id': patient.id,
        'name': 'Jose',
        'age': 3
    }
    self.assertDictEqual(PatientSerializer(patient).data, expected)

    # Age is optional on deserialization and defaults to None.
    serializer = PatientSerializer(data={'name': 'Maria'})
    self.assertTrue(serializer.is_valid())
    serializer.save(call_id=call.id)
    patient = Patient.objects.get(name='Maria')
    expected = {
        'id': patient.id,
        'name': 'Maria',
        'age': None
    }
    self.assertDictEqual(PatientSerializer(patient).data, expected)
def test_location_serializer(self):
    """LocationSerializer must expose every Location field verbatim."""

    def expected_for(loc):
        # Full field map the serializer is expected to produce for `loc`.
        return {
            'id': loc.id,
            'type': loc.type,
            'location': point2str(loc.location),
            'number': loc.number,
            'street': loc.street,
            'unit': loc.unit,
            'neighborhood': loc.neighborhood,
            'city': loc.city,
            'state': loc.state,
            'zipcode': loc.zipcode,
            'country': loc.country,
            'name': loc.name,
            'comment': loc.comment,
            'updated_by': loc.updated_by.id,
            'updated_on': date2iso(loc.updated_on)
        }

    # Incident-type location with all defaults.
    loc = Location.objects.create(type=LocationType.i.name, updated_by=self.u1)
    self.assertDictEqual(LocationSerializer(loc).data, expected_for(loc))

    # Hospital-type location carrying an address.
    loc = Location.objects.create(type=LocationType.h.name, number='123',
                                  street='adsasd', updated_by=self.u1)
    self.assertDictEqual(LocationSerializer(loc).data, expected_for(loc))
def test_waypoint_serializer(self):
    """Serialize existing waypoints; check waypoint fields and the nested
    location representation for two location types."""
    # create call
    c_1 = Call.objects.create(updated_by=self.u1)
    # create ambulance call
    ac_1 = AmbulanceCall.objects.create(call=c_1, ambulance=self.a1, updated_by=self.u1)
    # serialization
    wpl_1 = Location.objects.create(type=LocationType.i.name, updated_by=self.u1)
    wpl_1_serializer = LocationSerializer(wpl_1)
    wp_1 = Waypoint.objects.create(ambulance_call=ac_1, order=0, status=WaypointStatus.C.name,
                                   location=wpl_1, updated_by=self.u1)
    serializer = WaypointSerializer(wp_1)
    # top-level waypoint fields, with the location serialized inline
    result = {
        'id': wp_1.id,
        'ambulance_call_id': ac_1.id,
        'order': 0,
        'status': WaypointStatus.C.name,
        'location': wpl_1_serializer.data,
        'comment': wp_1.comment,
        'updated_by': wp_1.updated_by.id,
        'updated_on': date2iso(wp_1.updated_on)
    }
    self.assertDictEqual(serializer.data, result)
    # the nested location must include every Location field
    result = {
        'id': wpl_1.id,
        'type': LocationType.i.name,
        'location': point2str(wpl_1.location),
        'number': wpl_1.number,
        'street': wpl_1.street,
        'unit': wpl_1.unit,
        'neighborhood': wpl_1.neighborhood,
        'city': wpl_1.city,
        'state': wpl_1.state,
        'zipcode': wpl_1.zipcode,
        'country': wpl_1.country,
        'name': wpl_1.name,
        'comment': wpl_1.comment,
        'updated_by': wpl_1.updated_by.id,
        'updated_on': date2iso(wpl_1.updated_on)
    }
    self.assertDictEqual(serializer.data['location'], result)
    # serialization: second waypoint with an address-bearing location
    wpl_2 = Location.objects.create(type=LocationType.h.name, number='123', street='adsasd', updated_by=self.u1)
    wpl_2_serializer = LocationSerializer(wpl_2)
    wp_2 = Waypoint.objects.create(ambulance_call=ac_1, order=1, status=WaypointStatus.D.name,
                                   location=wpl_2, updated_by=self.u1)
    serializer = WaypointSerializer(wp_2)
    result = {
        'id': wp_2.id,
        'ambulance_call_id': ac_1.id,
        'order': 1,
        'status': WaypointStatus.D.name,
        'location': wpl_2_serializer.data,
        'comment': wp_2.comment,
        'updated_by': wp_2.updated_by.id,
        'updated_on': date2iso(wp_2.updated_on)
    }
    self.assertDictEqual(serializer.data, result)
    # number/street are hard-coded here to pin the values set at creation
    result = {
        'id': wpl_2.id,
        'type': LocationType.h.name,
        'location': point2str(wpl_2.location),
        'number': '123',
        'street': 'adsasd',
        'unit': wpl_2.unit,
        'neighborhood': wpl_2.neighborhood,
        'city': wpl_2.city,
        'state': wpl_2.state,
        'zipcode': wpl_2.zipcode,
        'country': wpl_2.country,
        'name': wpl_2.name,
        'comment': wpl_2.comment,
        'updated_by': wpl_2.updated_by.id,
        'updated_on': date2iso(wpl_2.updated_on)
    }
    self.assertDictEqual(serializer.data['location'], result)
def test_waypoint_serializer_create(self):
    """Create waypoints via WaypointSerializer, including nested location
    creation, reuse of an existing hospital location, and two failure
    modes (new hospital location, missing location)."""
    # create call
    c_1 = Call.objects.create(updated_by=self.u1)
    # create ambulance call
    ac_1 = AmbulanceCall.objects.create(call=c_1, ambulance=self.a1, updated_by=self.u1)
    # serialization: a nested incident location is created on save
    data = {
        'order': 0,
        'status': WaypointStatus.C.name,
        'location': {
            'type': LocationType.i.name
        }
    }
    serializer = WaypointSerializer(data=data)
    # NOTE(review): is_valid() return value is ignored here; errors would
    # only surface as a failure of the save below.
    serializer.is_valid()
    wp_1 = serializer.save(updated_by=self.u1, ambulance_call_id=ac_1.id)
    wpl_1 = wp_1.location
    wpl_1_serializer = LocationSerializer(wpl_1)
    serializer = WaypointSerializer(wp_1)
    result = {
        'id': wp_1.id,
        'ambulance_call_id': ac_1.id,
        'order': 0,
        'status': WaypointStatus.C.name,
        'location': wpl_1_serializer.data,
        'comment': wp_1.comment,
        'updated_by': wp_1.updated_by.id,
        'updated_on': date2iso(wp_1.updated_on)
    }
    self.assertDictEqual(serializer.data, result)
    result = {
        'id': wpl_1.id,
        'type': LocationType.i.name,
        'location': point2str(wpl_1.location),
        'number': wpl_1.number,
        'street': wpl_1.street,
        'unit': wpl_1.unit,
        'neighborhood': wpl_1.neighborhood,
        'city': wpl_1.city,
        'state': wpl_1.state,
        'zipcode': wpl_1.zipcode,
        'country': wpl_1.country,
        'name': wpl_1.name,
        'comment': wpl_1.comment,
        'updated_by': wpl_1.updated_by.id,
        'updated_on': date2iso(wpl_1.updated_on)
    }
    self.assertDictEqual(serializer.data['location'], result)
    # serialization: an existing hospital location is referenced by id
    data = {
        'order': 1,
        'status': WaypointStatus.V.name,
        'location': {
            'id': self.h1.id,
            'type': LocationType.h.name
        }
    }
    serializer = WaypointSerializer(data=data)
    serializer.is_valid()
    wp_2 = serializer.save(updated_by=self.u1,
                           ambulance_call_id=ac_1.id, publish=True)
    # the waypoint must point at the hospital's underlying Location row
    wpl_2 = self.h1.location_ptr
    wpl_2_serializer = LocationSerializer(wpl_2)
    logger.debug(wpl_2_serializer.data)
    serializer = WaypointSerializer(wp_2)
    logger.debug(serializer.data['location'])
    result = {
        'id': wp_2.id,
        'ambulance_call_id': ac_1.id,
        'order': 1,
        'status': WaypointStatus.V.name,
        'location': wpl_2_serializer.data,
        'comment': wp_2.comment,
        'updated_by': wp_2.updated_by.id,
        'updated_on': date2iso(wp_2.updated_on)
    }
    # show the full diff if the large dicts disagree
    self.maxDiff = None
    self.assertDictEqual(serializer.data, result)
    result = {
        'id': wpl_2.id,
        'type': LocationType.h.name,
        'location': point2str(wpl_2.location),
        'number': wpl_2.number,
        'street': wpl_2.street,
        'unit': wpl_2.unit,
        'neighborhood': wpl_2.neighborhood,
        'city': wpl_2.city,
        'state': wpl_2.state,
        'zipcode': wpl_2.zipcode,
        'country': wpl_2.country,
        'name': wpl_2.name,
        'comment': wpl_2.comment,
        'updated_by': wpl_2.updated_by.id,
        'updated_on': date2iso(wpl_2.updated_on)
    }
    self.assertDictEqual(serializer.data['location'], result)
    # try to create hospital waypoint: a new (id-less) hospital location
    # cannot be created through the waypoint serializer
    data = {
        'order': 1,
        'status': WaypointStatus.V.name,
        'location': {
            'type': LocationType.h.name
        }
    }
    serializer = WaypointSerializer(data=data)
    serializer.is_valid()
    self.assertRaises(serializers.ValidationError, serializer.save, updated_by=self.u1, ambulance_call_id=ac_1.id)
    # try to create waypoint without location: also rejected on save
    data = {
        'order': 1,
        'status': WaypointStatus.V.name,
    }
    serializer = WaypointSerializer(data=data)
    serializer.is_valid()
    self.assertRaises(serializers.ValidationError, serializer.save, updated_by=self.u1, ambulance_call_id=ac_1.id)
def test_waypoint_serializer_update(self):
    """Update waypoints via WaypointSerializer: order/status may change,
    while swapping in a different hospital location is rejected."""
    # create call
    c_1 = Call.objects.create(updated_by=self.u1)
    # create ambulance call
    ac_1 = AmbulanceCall.objects.create(call=c_1, ambulance=self.a1, updated_by=self.u1)
    # waypoint creation
    wpl_1 = Location.objects.create(type=LocationType.i.name, updated_by=self.u1)
    wp_1 = Waypoint.objects.create(ambulance_call=ac_1, order=0, status=WaypointStatus.C.name,
                                   location=wpl_1, updated_by=self.u1)
    wpl_2 = Location.objects.create(type=LocationType.w.name, number='123', street='adsasd', updated_by=self.u1)
    wp_2 = Waypoint.objects.create(ambulance_call=ac_1, order=1, status=WaypointStatus.D.name,
                                   location=wpl_2, updated_by=self.u1)
    wpl_3 = self.h1.location_ptr
    # NOTE(review): wp_3 reuses order=1 (same as wp_2) — presumably
    # intentional since order uniqueness is not under test here; confirm.
    wp_3 = Waypoint.objects.create(ambulance_call=ac_1, order=1, status=WaypointStatus.V.name,
                                   location=wpl_3, updated_by=self.u1)
    wpl_1_serializer = LocationSerializer(wpl_1)
    serializer = WaypointSerializer(wp_1)
    # baseline serialization of wp_1 before any update
    result = {
        'id': wp_1.id,
        'ambulance_call_id': ac_1.id,
        'order': 0,
        'status': WaypointStatus.C.name,
        'location': wpl_1_serializer.data,
        'comment': wp_1.comment,
        'updated_by': wp_1.updated_by.id,
        'updated_on': date2iso(wp_1.updated_on)
    }
    self.assertDictEqual(serializer.data, result)
    result = {
        'id': wpl_1.id,
        'type': LocationType.i.name,
        'location': point2str(wpl_1.location),
        'number': wpl_1.number,
        'street': wpl_1.street,
        'unit': wpl_1.unit,
        'neighborhood': wpl_1.neighborhood,
        'city': wpl_1.city,
        'state': wpl_1.state,
        'zipcode': wpl_1.zipcode,
        'country': wpl_1.country,
        'name': wpl_1.name,
        'comment': wpl_1.comment,
        'updated_by': wpl_1.updated_by.id,
        'updated_on': date2iso(wpl_1.updated_on)
    }
    self.assertDictEqual(serializer.data['location'], result)
    # update waypoint 1: change order and status, location untouched
    data = {
        'order': 1,
        'status': WaypointStatus.V.name
    }
    serializer = WaypointSerializer(wp_1, data=data)
    serializer.is_valid()
    logger.debug(serializer.errors)
    wp_1 = serializer.save(updated_by=self.u1)
    wpl_1_serializer = LocationSerializer(wpl_1)
    serializer = WaypointSerializer(wp_1)
    result = {
        'id': wp_1.id,
        'ambulance_call_id': ac_1.id,
        'order': 1,
        'status': WaypointStatus.V.name,
        'location': wpl_1_serializer.data,
        'comment': wp_1.comment,
        'updated_by': wp_1.updated_by.id,
        'updated_on': date2iso(wp_1.updated_on)
    }
    self.assertDictEqual(serializer.data, result)
    result = {
        'id': wpl_1.id,
        'type': LocationType.i.name,
        'location': point2str(wpl_1.location),
        'number': wpl_1.number,
        'street': wpl_1.street,
        'unit': wpl_1.unit,
        'neighborhood': wpl_1.neighborhood,
        'city': wpl_1.city,
        'state': wpl_1.state,
        'zipcode': wpl_1.zipcode,
        'country': wpl_1.country,
        'name': wpl_1.name,
        'comment': wpl_1.comment,
        'updated_by': wpl_1.updated_by.id,
        'updated_on': date2iso(wpl_1.updated_on)
    }
    self.assertDictEqual(serializer.data['location'], result)
    # update waypoint 2: change order and status, location untouched
    data = {
        'order': 2,
        'status': WaypointStatus.C.name
    }
    serializer = WaypointSerializer(wp_2, data=data)
    serializer.is_valid()
    logger.debug(serializer.errors)
    wp_2 = serializer.save(updated_by=self.u1)
    wpl_2_serializer = LocationSerializer(wpl_2)
    serializer = WaypointSerializer(wp_2)
    result = {
        'id': wp_2.id,
        'ambulance_call_id': ac_1.id,
        'order': 2,
        'status': WaypointStatus.C.name,
        'location': wpl_2_serializer.data,
        'comment': wp_2.comment,
        'updated_by': wp_2.updated_by.id,
        'updated_on': date2iso(wp_2.updated_on)
    }
    self.assertDictEqual(serializer.data, result)
    result = {
        'id': wpl_2.id,
        'type': LocationType.w.name,
        'location': point2str(wpl_2.location),
        'number': wpl_2.number,
        'street': wpl_2.street,
        'unit': wpl_2.unit,
        'neighborhood': wpl_2.neighborhood,
        'city': wpl_2.city,
        'state': wpl_2.state,
        'zipcode': wpl_2.zipcode,
        'country': wpl_2.country,
        'name': wpl_2.name,
        'comment': wpl_2.comment,
        'updated_by': wpl_2.updated_by.id,
        'updated_on': date2iso(wpl_2.updated_on)
    }
    self.assertDictEqual(dict(serializer.data['location']), result)
    # update waypoint 3: attempting to swap in a different location
    # (arbitrary id 20) must be rejected on save
    data = {
        'order': 2,
        'status': WaypointStatus.C.name,
        'location': {
            'id': 20,
            'type': LocationType.h.name
        }
    }
    serializer = WaypointSerializer(wp_3, data=data)
    serializer.is_valid()
    logger.debug(serializer.errors)
    self.assertRaises(serializers.ValidationError, serializer.save, updated_by=self.u1)
def test_call_serializer(self):
    """End-to-end CallSerializer test: a pending call accumulates ambulance
    calls, waypoints, patients and ambulance updates; finally the updates
    REST endpoint is checked and duplicate ambulance calls are rejected.

    Fixes two defects in the original:
    - ``self.a1status = ...`` was a typo for ``self.a1.status = ...``, so
      the HB status was never applied before the save.
    - the expected 'number' for wpl_1 compared against ``wpl_1.street``
      instead of ``wpl_1.number`` (passed only because both are empty).
    """
    # create call
    c1 = Call.objects.create(updated_by=self.u1)
    # it is fine to have no ambulances because it is pending
    serializer = CallSerializer(c1)
    expected = {
        'id': c1.id,
        'status': c1.status,
        'details': c1.details,
        'priority': c1.priority,
        'created_at': date2iso(c1.created_at),
        'pending_at': date2iso(c1.pending_at),
        'started_at': date2iso(c1.started_at),
        'ended_at': date2iso(c1.ended_at),
        'comment': c1.comment,
        'updated_by': c1.updated_by.id,
        'updated_on': date2iso(c1.updated_on),
        'ambulancecall_set': [],
        'patient_set': []
    }
    self.assertDictEqual(serializer.data, expected)
    # create first ambulance call
    ambulance_call_1 = AmbulanceCall.objects.create(call=c1, ambulance=self.a1, updated_by=self.u1)
    ambulance_call = ambulance_call_1
    serializer = AmbulanceCallSerializer(ambulance_call)
    expected = {
        'id': ambulance_call.id,
        'ambulance_id': ambulance_call.ambulance.id,
        'comment': ambulance_call.comment,
        'updated_by': ambulance_call.updated_by.id,
        'updated_on': date2iso(ambulance_call.updated_on),
        'status': ambulance_call.status,
        'waypoint_set': []
    }
    self.assertDictEqual(serializer.data, expected)
    serializer = CallSerializer(c1)
    ambulance_call_serializer_1 = AmbulanceCallSerializer(ambulance_call_1)
    expected = {
        'id': c1.id,
        'status': c1.status,
        'details': c1.details,
        'priority': c1.priority,
        'created_at': date2iso(c1.created_at),
        'pending_at': date2iso(c1.pending_at),
        'started_at': date2iso(c1.started_at),
        'ended_at': date2iso(c1.ended_at),
        'comment': c1.comment,
        'updated_by': c1.updated_by.id,
        'updated_on': date2iso(c1.updated_on),
        'ambulancecall_set': [],
        'patient_set': []
    }
    # ambulancecall_set ordering is unspecified, so compare it separately
    self.assertCountEqual(serializer.data['ambulancecall_set'], [ambulance_call_serializer_1.data])
    result = serializer.data
    result['ambulancecall_set'] = []
    self.assertDictEqual(result, expected)
    # set accepted
    ambulance_call_1.status = AmbulanceCallStatus.A.name
    ambulance_call_1.save()
    ambulance_call_serializer_1 = AmbulanceCallSerializer(ambulance_call_1)
    expected = {
        'id': ambulance_call.id,
        'ambulance_id': ambulance_call.ambulance.id,
        'comment': ambulance_call.comment,
        'updated_by': ambulance_call.updated_by.id,
        'updated_on': date2iso(ambulance_call.updated_on),
        'status': AmbulanceCallStatus.A.name,
        'waypoint_set': []
    }
    self.assertDictEqual(ambulance_call_serializer_1.data, expected)
    # create second ambulance call
    ambulance_call_2 = AmbulanceCall.objects.create(call=c1, ambulance=self.a3, updated_by=self.u1)
    ambulance_call = ambulance_call_2
    serializer = AmbulanceCallSerializer(ambulance_call)
    expected = {
        'id': ambulance_call.id,
        'ambulance_id': ambulance_call.ambulance.id,
        'comment': ambulance_call.comment,
        'updated_by': ambulance_call.updated_by.id,
        'updated_on': date2iso(ambulance_call.updated_on),
        'status': ambulance_call.status,
        'waypoint_set': []
    }
    self.assertDictEqual(serializer.data, expected)
    serializer = CallSerializer(c1)
    ambulance_call_serializer_2 = AmbulanceCallSerializer(ambulance_call_2)
    expected = {
        'id': c1.id,
        'status': c1.status,
        'details': c1.details,
        'priority': c1.priority,
        'created_at': date2iso(c1.created_at),
        'pending_at': date2iso(c1.pending_at),
        'started_at': date2iso(c1.started_at),
        'ended_at': date2iso(c1.ended_at),
        'comment': c1.comment,
        'updated_by': c1.updated_by.id,
        'updated_on': date2iso(c1.updated_on),
        'ambulancecall_set': [],
        'patient_set': []
    }
    self.assertCountEqual(serializer.data['ambulancecall_set'],
                          [ambulance_call_serializer_2.data, ambulance_call_serializer_1.data])
    result = serializer.data
    result['ambulancecall_set'] = []
    self.assertDictEqual(result, expected)
    # set accepted
    ambulance_call_2.status = AmbulanceCallStatus.A.name
    ambulance_call_2.save()
    ambulance_call_serializer_2 = AmbulanceCallSerializer(ambulance_call_2)
    expected = {
        'id': ambulance_call.id,
        'ambulance_id': ambulance_call.ambulance.id,
        'comment': ambulance_call.comment,
        'updated_by': ambulance_call.updated_by.id,
        'updated_on': date2iso(ambulance_call.updated_on),
        'status': AmbulanceCallStatus.A.name,
        'waypoint_set': []
    }
    self.assertDictEqual(ambulance_call_serializer_2.data, expected)
    # Add waypoints to ambulancecalls
    wpl_1 = Location.objects.create(type=LocationType.i.name, updated_by=self.u1)
    wp_1 = Waypoint.objects.create(ambulance_call=ambulance_call_1, order=0, status=WaypointStatus.C.name,
                                   location=wpl_1, updated_by=self.u1)
    wpl_2 = Location.objects.create(type=LocationType.h.name, number='123', street='adsasd', updated_by=self.u2)
    wp_2 = Waypoint.objects.create(ambulance_call=ambulance_call_2, order=1, status=WaypointStatus.D.name,
                                   location=wpl_2, updated_by=self.u2)
    wp_3 = Waypoint.objects.create(ambulance_call=ambulance_call_2, order=2, status=WaypointStatus.V.name,
                                   location=self.h1, updated_by=self.u2)
    # create ambulance update to use in event
    self.a1.status = AmbulanceStatus.PB.name
    self.a1.timestamp = timezone.now()
    self.a1.save()
    ambulance_update_1 = AmbulanceUpdate.objects.get(status=AmbulanceStatus.PB.name)
    # set suspended
    ambulance_call_1.status = AmbulanceCallStatus.S.name
    ambulance_call_1.save()
    self.a1.status = AmbulanceStatus.AP.name
    self.a1.timestamp = timezone.now()
    self.a1.save()
    ambulance_update_2 = AmbulanceUpdate.objects.get(status=AmbulanceStatus.AP.name)
    # set accepted
    ambulance_call_1.status = AmbulanceCallStatus.A.name
    ambulance_call_1.save()
    # FIX: was `self.a1status = ...`, which only created a stray attribute
    # and left the ambulance status unchanged for the save below.
    self.a1.status = AmbulanceStatus.HB.name
    self.a1.timestamp = timezone.now()
    self.a1.save()
    serializer = CallSerializer(c1)
    ambulance_call_serializer_1 = AmbulanceCallSerializer(ambulance_call_1)
    ambulance_call_serializer_2 = AmbulanceCallSerializer(ambulance_call_2)
    expected = {
        'id': c1.id,
        'status': c1.status,
        'details': c1.details,
        'priority': c1.priority,
        'created_at': date2iso(c1.created_at),
        'pending_at': date2iso(c1.pending_at),
        'started_at': date2iso(c1.started_at),
        'ended_at': date2iso(c1.ended_at),
        'comment': c1.comment,
        'updated_by': c1.updated_by.id,
        'updated_on': date2iso(c1.updated_on),
        'ambulancecall_set': [],
        'patient_set': []
    }
    self.assertCountEqual(serializer.data['ambulancecall_set'],
                          [ambulance_call_serializer_2.data, ambulance_call_serializer_1.data])
    result = serializer.data
    result['ambulancecall_set'] = []
    self.assertDictEqual(result, expected)
    wp_1_serializer = WaypointSerializer(wp_1)
    result = {
        'id': wp_1.id,
        'ambulance_call_id': ambulance_call_1.id,
        'order': 0,
        'status': WaypointStatus.C.name,
        'location': LocationSerializer(wpl_1).data,
        'comment': wp_1.comment,
        'updated_by': wp_1.updated_by.id,
        'updated_on': date2iso(wp_1.updated_on)
    }
    self.assertDictEqual(wp_1_serializer.data, result)
    result = {
        'id': wpl_1.id,
        'type': LocationType.i.name,
        'location': point2str(wpl_1.location),
        # FIX: was `wpl_1.street` — compare 'number' against the number field
        'number': wpl_1.number,
        'street': wpl_1.street,
        'unit': wpl_1.unit,
        'neighborhood': wpl_1.neighborhood,
        'city': wpl_1.city,
        'state': wpl_1.state,
        'zipcode': wpl_1.zipcode,
        'country': wpl_1.country,
        'name': wpl_1.name,
        'comment': wpl_1.comment,
        'updated_by': wpl_1.updated_by.id,
        'updated_on': date2iso(wpl_1.updated_on)
    }
    self.assertDictEqual(wp_1_serializer.data['location'], result)
    wp_2_serializer = WaypointSerializer(wp_2)
    result = {
        'id': wp_2.id,
        'ambulance_call_id': ambulance_call_2.id,
        'order': 1,
        'status': WaypointStatus.D.name,
        'location': LocationSerializer(wpl_2).data,
        'comment': wp_2.comment,
        'updated_by': wp_2.updated_by.id,
        'updated_on': date2iso(wp_2.updated_on)
    }
    self.assertDictEqual(wp_2_serializer.data, result)
    result = {
        'id': wpl_2.id,
        'type': LocationType.h.name,
        'location': point2str(wpl_2.location),
        'number': '123',
        'street': 'adsasd',
        'unit': wpl_2.unit,
        'neighborhood': wpl_2.neighborhood,
        'city': wpl_2.city,
        'state': wpl_2.state,
        'zipcode': wpl_2.zipcode,
        'country': wpl_2.country,
        'name': wpl_2.name,
        'comment': wpl_2.comment,
        'updated_by': wpl_2.updated_by.id,
        'updated_on': date2iso(wpl_2.updated_on)
    }
    self.assertDictEqual(wp_2_serializer.data['location'], result)
    wp_3_serializer = WaypointSerializer(wp_3)
    result = {
        'id': wp_3.id,
        'ambulance_call_id': ambulance_call_2.id,
        'order': 2,
        'status': WaypointStatus.V.name,
        'location': LocationSerializer(self.h1).data,
        'comment': wp_3.comment,
        'updated_by': wp_3.updated_by.id,
        'updated_on': date2iso(wp_3.updated_on)
    }
    self.assertDictEqual(wp_3_serializer.data, result)
    result = {
        'id': self.h1.id,
        'type': LocationType.h.name,
        'location': point2str(self.h1.location),
        'number': self.h1.number,
        'street': self.h1.street,
        'unit': self.h1.unit,
        'neighborhood': self.h1.neighborhood,
        'city': self.h1.city,
        'state': self.h1.state,
        'zipcode': self.h1.zipcode,
        'country': self.h1.country,
        'name': self.h1.name,
        'comment': self.h1.comment,
        'updated_by': self.h1.updated_by.id,
        'updated_on': date2iso(self.h1.updated_on)
    }
    self.assertDictEqual(wp_3_serializer.data['location'], result)
    # add patients
    p1 = Patient.objects.create(call=c1, name='Jose', age=3)
    p2 = Patient.objects.create(call=c1, name='Maria', age=4)
    patient_serializer_1 = PatientSerializer(p1)
    patient_serializer_2 = PatientSerializer(p2)
    serializer = CallSerializer(c1)
    expected = {
        'id': c1.id,
        'status': c1.status,
        'details': c1.details,
        'priority': c1.priority,
        'created_at': date2iso(c1.created_at),
        'pending_at': date2iso(c1.pending_at),
        'started_at': date2iso(c1.started_at),
        'ended_at': date2iso(c1.ended_at),
        'comment': c1.comment,
        'updated_by': c1.updated_by.id,
        'updated_on': date2iso(c1.updated_on),
        'ambulancecall_set': [],
        'patient_set': []
    }
    self.assertCountEqual(serializer.data['ambulancecall_set'],
                          [ambulance_call_serializer_2.data, ambulance_call_serializer_1.data])
    self.assertCountEqual(serializer.data['patient_set'],
                          [patient_serializer_2.data, patient_serializer_1.data])
    result = serializer.data
    result['ambulancecall_set'] = []
    result['patient_set'] = []
    self.assertDictEqual(result, expected)
    # retrieve ambulance updates since the first PB update, excluding the
    # AP update recorded while the call was suspended
    queryset = AmbulanceUpdate\
        .objects.filter(ambulance=self.a1.id)\
        .filter(timestamp__gte=ambulance_update_1.timestamp)\
        .exclude(id=ambulance_update_2.id)
    answer1 = []
    for u in queryset:
        serializer = AmbulanceUpdateSerializer(u)
        result = {
            'id': u.id,
            'ambulance_id': u.ambulance.id,
            'ambulance_identifier': u.ambulance.identifier,
            'comment': u.comment,
            'status': u.status,
            'orientation': u.orientation,
            'location': point2str(u.location),
            'timestamp': date2iso(u.timestamp),
            'updated_by_username': u.updated_by.username,
            'updated_on': date2iso(u.updated_on)
        }
        answer1.append(serializer.data)
    logger.debug(answer1)
    self.assertEqual(len(answer1), 2)
    # instantiate client
    client = Client()
    # login as admin
    client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])
    # retrieve ambulances updates
    response = client.get('/api/ambulance/{}/updates/?call_id={}'.format(self.a1.id, c1.id),
                          follow=True)
    self.assertEqual(response.status_code, 200)
    result = JSONParser().parse(BytesIO(response.content))
    logger.debug(result)
    logger.debug(answer1)
    self.assertCountEqual(result, answer1)
    # logout
    client.logout()
    # cannot have duplicate
    # This must be last
    self.assertRaises(IntegrityError, AmbulanceCall.objects.create, call=c1, ambulance=self.a1, updated_by=self.u1)
def test_call_serializer_create(self):
    """Exercise CallSerializer creation paths.

    Covers: a pending call with empty nested sets, rejection of a started
    call without an ambulancecall_set, nested ambulancecall/waypoint
    creation, simultaneous patient creation, and the duplicate-ambulance
    IntegrityError (which must roll the whole call back).
    """
    call = {
        'status': CallStatus.P.name,
        'priority': CallPriority.B.name,
        'ambulancecall_set': [],
        'patient_set': []
    }
    serializer = CallSerializer(data=call)
    # FIX: the is_valid() result was previously ignored; assert it so a
    # validation failure is reported here instead of as a confusing
    # downstream error on save()
    self.assertTrue(serializer.is_valid())
    call = serializer.save(updated_by=self.u1)

    # re-read the call and compare the serialized form field by field
    c1 = Call.objects.get(id=call.id)
    serializer = CallSerializer(c1)
    result = {
        'id': c1.id,
        'status': c1.status,
        'details': c1.details,
        'priority': c1.priority,
        'created_at': date2iso(c1.created_at),
        'pending_at': date2iso(c1.pending_at),
        'started_at': date2iso(c1.started_at),
        'ended_at': date2iso(c1.ended_at),
        'comment': c1.comment,
        'updated_by': c1.updated_by.id,
        'updated_on': date2iso(c1.updated_on),
        'ambulancecall_set': [],
        'patient_set': []
    }
    self.assertDictEqual(serializer.data, result)

    # accepted (started) Call without ambulancecall_set fails validation
    call = {
        'status': CallStatus.S.name,
        'priority': CallPriority.B.name,
        'patient_set': []
    }
    serializer = CallSerializer(data=call)
    self.assertFalse(serializer.is_valid())

    # Pending Call with ambulancecall_set will create ambulancecalls and
    # their nested waypoints
    call = {
        'status': CallStatus.P.name,
        'priority': CallPriority.B.name,
        'ambulancecall_set': [
            {
                'ambulance_id': self.a1.id,
                'waypoint_set': [
                    {
                        'order': 0,
                        'location': {
                            'type': LocationType.i.name,
                            'number': '123',
                            'street': 'some street'
                        }
                    },
                    {
                        'order': 1,
                        'status': WaypointStatus.D.name,
                        'location': {
                            'type': LocationType.w.name,
                            'location': {
                                'longitude': -110.54,
                                'latitude': 35.75
                            }
                        }
                    }
                ]
            },
            {
                'ambulance_id': self.a2.id,
                'waypoint_set': [
                    {
                        'order': 0,
                        'location': {
                            'type': LocationType.i.name,
                            'number': '321',
                            'street': 'another street'
                        }
                    }
                ]
            }
        ],
        'patient_set': []
    }
    serializer = CallSerializer(data=call)
    self.assertTrue(serializer.is_valid())
    call = serializer.save(updated_by=self.u1)

    # test CallSerializer
    c1 = Call.objects.get(id=call.id)
    serializer = CallSerializer(c1)
    expected_ambulancecall_set = [
        AmbulanceCallSerializer(
            AmbulanceCall.objects.get(call_id=c1.id,
                                      ambulance_id=self.a1.id)).data,
        AmbulanceCallSerializer(
            AmbulanceCall.objects.get(call_id=c1.id,
                                      ambulance_id=self.a2.id)).data
    ]
    expected = {
        'id': c1.id,
        'status': c1.status,
        'details': c1.details,
        'priority': c1.priority,
        'created_at': date2iso(c1.created_at),
        'pending_at': date2iso(c1.pending_at),
        'started_at': date2iso(c1.started_at),
        'ended_at': date2iso(c1.ended_at),
        'comment': c1.comment,
        'updated_by': c1.updated_by.id,
        'updated_on': date2iso(c1.updated_on),
        'ambulancecall_set': expected_ambulancecall_set,
        'patient_set': []
    }
    result = serializer.data
    # nested-set ordering is not guaranteed; compare as multisets, then
    # blank the sets and compare the remaining flat fields
    self.assertCountEqual(result['ambulancecall_set'],
                          expected['ambulancecall_set'])
    expected['ambulancecall_set'] = []
    result['ambulancecall_set'] = []
    self.assertDictEqual(result, expected)
    self.assertEqual(len(expected_ambulancecall_set[0]['waypoint_set']), 2)
    self.assertEqual(len(expected_ambulancecall_set[1]['waypoint_set']), 1)

    # Pending Call with ambulancecall_set and patient_set
    call = {
        'status': CallStatus.P.name,
        'priority': CallPriority.B.name,
        'ambulancecall_set': [{'ambulance_id': self.a1.id}, {'ambulance_id': self.a2.id}],
        'patient_set': [{'name': 'Jose', 'age': 3}, {'name': 'Maria', 'age': 10}]
    }
    serializer = CallSerializer(data=call)
    self.assertTrue(serializer.is_valid())
    call = serializer.save(updated_by=self.u1)

    # test CallSerializer
    c1 = Call.objects.get(id=call.id)
    serializer = CallSerializer(c1)
    expected_patient_set = PatientSerializer(Patient.objects.filter(call_id=c1.id), many=True).data
    expected_ambulancecall_set = AmbulanceCallSerializer(AmbulanceCall.objects.filter(call_id=c1.id), many=True).data
    expected = {
        'id': c1.id,
        'status': c1.status,
        'details': c1.details,
        'priority': c1.priority,
        'created_at': date2iso(c1.created_at),
        'pending_at': date2iso(c1.pending_at),
        'started_at': date2iso(c1.started_at),
        'ended_at': date2iso(c1.ended_at),
        'comment': c1.comment,
        'updated_by': c1.updated_by.id,
        'updated_on': date2iso(c1.updated_on),
        'ambulancecall_set': expected_ambulancecall_set,
        'patient_set': expected_patient_set
    }
    result = serializer.data
    self.assertCountEqual(result['ambulancecall_set'],
                          expected['ambulancecall_set'])
    self.assertCountEqual(result['patient_set'],
                          expected['patient_set'])
    expected['ambulancecall_set'] = []
    result['ambulancecall_set'] = []
    expected['patient_set'] = []
    result['patient_set'] = []
    self.assertDictEqual(result, expected)

    # Should fail at the database level because ambulance ids are repeated
    call = {
        'status': CallStatus.S.name,
        'priority': CallPriority.B.name,
        'ambulancecall_set': [{'ambulance_id': self.a1.id}, {'ambulance_id': self.a1.id}],
        'patient_set': []
    }
    serializer = CallSerializer(data=call)
    self.assertTrue(serializer.is_valid())
    self.assertRaises(IntegrityError, serializer.save, updated_by=self.u1)

    # make sure no call was created
    self.assertRaises(Call.DoesNotExist, Call.objects.get, status=CallStatus.S.name, priority=CallPriority.B.name)
# THESE ARE FAILING!
def _test_call_update_serializer(self):
    """Disabled test (leading underscore prevents collection): exercises
    partial updates through CallSerializer.

    NOTE(review): marked as failing above — re-enable by removing the
    underscore once the serializer update path is fixed. The commented-out
    sections below reference street/location fields that presumably moved
    off the Call model — verify before reviving them.
    """
    # superuser first

    # Update call status
    c = Call.objects.create(updated_by=self.u1)
    user = self.u1
    status = CallStatus.S.name

    serializer = CallSerializer(c,
                                data={
                                    'status': status
                                }, partial=True)
    serializer.is_valid()
    serializer.save(updated_by=user)

    # test: serialized call reflects the new status, all other fields kept
    serializer = CallSerializer(c)
    result = {
        'id': c.id,
        'status': status,
        'details': c.details,
        'priority': c.priority,
        'created_at': date2iso(c.created_at),
        'pending_at': date2iso(c.pending_at),
        'started_at': date2iso(c.started_at),
        'ended_at': date2iso(c.ended_at),
        'comment': c.comment,
        'updated_by': c.updated_by.id,
        'updated_on': date2iso(c.updated_on),
        'ambulancecall_set': AmbulanceCallSerializer(many=True).data,
        'patient_set': PatientSerializer(many=True).data
    }
    self.assertDictEqual(serializer.data, result)

    # # Update call street
    # street = 'new street'
    #
    # serializer = CallSerializer(c,
    #                             data={
    #                                 'street': street,
    #                             }, partial=True)
    # serializer.is_valid()
    # serializer.save(updated_by=user)
    #
    # # test
    # serializer = CallSerializer(c)
    # result = {
    #     'id': c.id,
    #     'status': c.status,
    #     'details': c.details,
    #     'priority': c.priority,
    #     'number': c.number,
    #     'street': street,
    #     'unit': c.unit,
    #     'neighborhood': c.neighborhood,
    #     'city': c.city,
    #     'state': c.state,
    #     'zipcode': c.zipcode,
    #     'country': c.country,
    #     'location': point2str(c.location),
    #     'created_at': date2iso(c.created_at),
    #     'pending_at': date2iso(c.pending_at),
    #     'started_at': date2iso(c.started_at),
    #     'ended_at': date2iso(c.ended_at),
    #     'comment': c.comment,
    #     'updated_by': c.updated_by.id,
    #     'updated_on': date2iso(c.updated_on),
    #     'ambulancecall_set': AmbulanceCallSerializer(many=True).data,
    #     'patient_set': PatientSerializer(many=True).data
    # }
    # self.assertDictEqual(serializer.data, result)
    #
    # # Update call location
    # location = {'latitude': -2., 'longitude': 7.}
    #
    # serializer = CallSerializer(c,
    #                             data={
    #                                 'location': location,
    #                             }, partial=True)
    # serializer.is_valid()
    # serializer.save(updated_by=user)
    #
    # # test
    # serializer = CallSerializer(c)
    # result = {
    #     'id': c.id,
    #     'status': c.status,
    #     'details': c.details,
    #     'priority': c.priority,
    #     'number': c.number,
    #     'street': c.street,
    #     'unit': c.unit,
    #     'neighborhood': c.neighborhood,
    #     'city': c.city,
    #     'state': c.state,
    #     'zipcode': c.zipcode,
    #     'country': c.country,
    #     'location': point2str(location),
    #     'created_at': date2iso(c.created_at),
    #     'pending_at': date2iso(c.pending_at),
    #     'started_at': date2iso(c.started_at),
    #     'ended_at': date2iso(c.ended_at),
    #     'comment': c.comment,
    #     'updated_by': c.updated_by.id,
    #     'updated_on': date2iso(c.updated_on),
    #     'ambulancecall_set': AmbulanceCallSerializer(many=True).data,
    #     'patient_set': PatientSerializer(many=True).data
    # }
    # self.assertDictEqual(serializer.data, result)

# Need more tests for updates by regular authorized user
def test_call_create_viewset(self):
    """Create a call through POST /api/call/.

    A superuser (the MQTT account) may create a call with nested
    ambulancecalls, waypoints and patients; a regular user gets 403.
    """
    # instantiate client and log in as superuser
    client = Client()
    client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])

    # payload: two ambulancecalls (2 waypoints / 1 waypoint) and two patients
    data = {
        'status': CallStatus.P.name,
        'priority': CallPriority.B.name,
        'ambulancecall_set': [
            {
                'ambulance_id': self.a1.id,
                'waypoint_set': [
                    {
                        'order': 0,
                        'location': {
                            'type': LocationType.i.name,
                            'number': '123',
                            'street': 'some street'
                        }
                    },
                    {
                        'order': 1,
                        'status': WaypointStatus.D.name,
                        'location': {
                            'type': LocationType.w.name,
                            'location': {
                                'longitude': -110.54,
                                'latitude': 35.75
                            }
                        }
                    }
                ]
            },
            {
                'ambulance_id': self.a2.id,
                'waypoint_set': [
                    {
                        'order': 0,
                        'location': {
                            'type': LocationType.i.name,
                            'number': '321',
                            'street': 'another street'
                        }
                    }
                ]
            }
        ],
        'patient_set': [{'name': 'Jose', 'age': 3}, {'name': 'Maria', 'age': 10}]
    }
    response = client.post('/api/call/', data, content_type='application/json')
    self.assertEqual(response.status_code, 201)

    # verify the nested objects were created along with the call
    c1 = Call.objects.get(status=CallStatus.P.name)
    serializer = CallSerializer(c1)
    expected_patient_set = PatientSerializer(Patient.objects.filter(call_id=c1.id), many=True).data
    expected_ambulancecall_set = AmbulanceCallSerializer(AmbulanceCall.objects.filter(call_id=c1.id), many=True).data
    self.assertEqual(len(expected_patient_set), 2)
    self.assertEqual(len(expected_ambulancecall_set[0]['waypoint_set']), 2)
    self.assertEqual(len(expected_ambulancecall_set[1]['waypoint_set']), 1)
    expected = {
        'id': c1.id,
        'status': c1.status,
        'details': c1.details,
        'priority': c1.priority,
        'created_at': date2iso(c1.created_at),
        'pending_at': date2iso(c1.pending_at),
        'started_at': date2iso(c1.started_at),
        'ended_at': date2iso(c1.ended_at),
        'comment': c1.comment,
        'updated_by': c1.updated_by.id,
        'updated_on': date2iso(c1.updated_on),
        'ambulancecall_set': expected_ambulancecall_set,
        'patient_set': expected_patient_set
    }
    result = serializer.data
    # nested-set ordering is not guaranteed; compare as multisets first,
    # then blank the sets and compare the remaining flat fields
    self.assertCountEqual(result['ambulancecall_set'],
                          expected['ambulancecall_set'])
    self.assertCountEqual(result['patient_set'],
                          expected['patient_set'])
    expected['ambulancecall_set'] = []
    result['ambulancecall_set'] = []
    expected['patient_set'] = []
    result['patient_set'] = []
    self.assertDictEqual(result, expected)

    # logout
    client.logout()

    # login as testuser2
    client.login(username='testuser2', password='very_secret')

    # Will fail for anyone not superuser
    data = {
        'status': CallStatus.P.name,
        'priority': CallPriority.B.name,
        'ambulancecall_set': [{'ambulance_id': self.a1.id}, {'ambulance_id': self.a2.id}],
        'patient_set': [{'name': 'Jose', 'age': 3}, {'name': 'Maria', 'age': 10}]
    }
    response = client.post('/api/call/', data, content_type='application/json')
    self.assertEqual(response.status_code, 403)

    # logout
    client.logout()
def test_call_list_viewset(self):
    """List calls via GET /api/call/.

    The superuser sees every call; a regular user sees only calls whose
    ambulances they are allowed to read.
    """
    # instantiate client and log in as superuser
    client = Client()
    client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])

    # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual throughout
    response = client.get('/api/call/', follow=True)
    self.assertEqual(response.status_code, 200)
    result = JSONParser().parse(BytesIO(response.content))
    answer = CallSerializer(Call.objects.all(), many=True).data
    self.assertCountEqual(result, answer)

    # one entry
    c1 = Call.objects.create(details='nani', updated_by=self.u1)
    response = client.get('/api/call/', follow=True)
    self.assertEqual(response.status_code, 200)
    result = JSONParser().parse(BytesIO(response.content))
    answer = CallSerializer(Call.objects.all(), many=True).data
    self.assertCountEqual(result, answer)

    # two entries
    c2 = Call.objects.create(details='suhmuh', updated_by=self.u1)
    response = client.get('/api/call/', follow=True)
    self.assertEqual(response.status_code, 200)
    result = JSONParser().parse(BytesIO(response.content))
    answer = CallSerializer(Call.objects.all(), many=True).data
    self.assertCountEqual(result, answer)

    # logout
    client.logout()

    # login as testuser2: no readable ambulances yet, so no calls visible
    client.login(username='testuser2', password='very_secret')
    response = client.get('/api/call/', follow=True)
    self.assertEqual(response.status_code, 200)
    result = JSONParser().parse(BytesIO(response.content))
    answer = CallSerializer([], many=True).data
    self.assertCountEqual(result, answer)

    # add ambulances to calls; testuser2 can only read a3, so only c1 shows
    AmbulanceCall.objects.create(call=c1, ambulance=self.a3, updated_by=self.u1)
    AmbulanceCall.objects.create(call=c2, ambulance=self.a2, updated_by=self.u1)
    response = client.get('/api/call/', follow=True)
    self.assertEqual(response.status_code, 200)
    result = JSONParser().parse(BytesIO(response.content))
    answer = CallSerializer([c1], many=True).data
    self.assertCountEqual(result, answer)

    # logout
    client.logout()
def test_call_list_view(self):
    """Render the HTML call list.

    The superuser sees every call's details; a regular user only sees
    calls whose ambulances they can read.
    """
    # instantiate client and log in as superuser
    client = Client()
    client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])

    # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual throughout
    response = client.get(reverse('ambulance:call_list'))
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'ambulance/call_list.html')

    # one entry
    c1 = Call.objects.create(details='nani', updated_by=self.u1)
    response = client.get(reverse('ambulance:call_list'))
    self.assertContains(response, 'nani')

    # two entries
    c2 = Call.objects.create(details='suhmuh', updated_by=self.u1)
    response = client.get(reverse('ambulance:call_list'))
    self.assertContains(response, 'nani')
    self.assertContains(response, 'suhmuh')

    # logout
    client.logout()

    # login as testuser2: no readable ambulances yet, so neither call shows
    client.login(username='testuser2', password='very_secret')
    response = client.get(reverse('ambulance:call_list'))
    self.assertEqual(response.status_code, 200)
    self.assertNotContains(response, 'nani')
    self.assertNotContains(response, 'suhmuh')

    # add ambulances to calls; testuser2 can only read a3, so only c1 shows
    AmbulanceCall.objects.create(call=c1, ambulance=self.a3, updated_by=self.u1)
    AmbulanceCall.objects.create(call=c2, ambulance=self.a2, updated_by=self.u1)
    response = client.get(reverse('ambulance:call_list'))
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, 'nani')
    self.assertNotContains(response, 'suhmuh')

    # logout
    client.logout()
def test_call_detail_view(self):
    """Render the HTML call detail page as superuser."""
    # instantiate client and log in as superuser
    client = Client()
    client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])

    # FIX: assertEquals is a deprecated alias (removed in Python 3.12)
    c1 = Call.objects.create(details="Test1", updated_by=self.u1)
    response = client.get(reverse('ambulance:call_detail', kwargs={'pk': c1.id}))
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'ambulance/call_detail.html')

    # the detail page shows the call's details text
    c1 = Call.objects.create(details="Test1", updated_by=self.u1)
    response = client.get(reverse('ambulance:call_detail', kwargs={'pk': c1.id}))
    self.assertContains(response, 'Test1')

    # TODO: Tests for unprivileged user

    # logout
    client.logout()
| 37.533668
| 121
| 0.55444
| 5,277
| 52,397
| 5.306424
| 0.052113
| 0.041783
| 0.023213
| 0.024641
| 0.868438
| 0.847082
| 0.82587
| 0.8028
| 0.770766
| 0.745161
| 0
| 0.024668
| 0.328435
| 52,397
| 1,395
| 122
| 37.560573
| 0.771115
| 0.076684
| 0
| 0.744661
| 0
| 0
| 0.113789
| 0.002675
| 0
| 0
| 0
| 0.000717
| 0.085422
| 1
| 0.011142
| false
| 0.007428
| 0.013928
| 0
| 0.025998
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
08432f03ae4911f91726c50919d96811876b71c7
| 9,364
|
py
|
Python
|
midv500/download_dataset.py
|
fcakyon/midv500-to-coco
|
2f1cd74e0bb8da2301a96e3fb0cd9f17005ed08c
|
[
"MIT"
] | 39
|
2020-05-15T17:34:32.000Z
|
2022-03-25T08:22:47.000Z
|
midv500/download_dataset.py
|
fcakyon/midv500-to-coco
|
2f1cd74e0bb8da2301a96e3fb0cd9f17005ed08c
|
[
"MIT"
] | 1
|
2020-08-04T09:04:06.000Z
|
2020-08-19T12:50:15.000Z
|
midv500/download_dataset.py
|
fcakyon/midv500-to-coco
|
2f1cd74e0bb8da2301a96e3fb0cd9f17005ed08c
|
[
"MIT"
] | 6
|
2020-04-23T19:40:16.000Z
|
2021-12-19T17:52:42.000Z
|
import os
import argparse
from midv500.utils import download, unzip
# The 50 MIDV-500 document-class archives all live under the same FTP
# prefix, so build the full URLs from the bare archive names.
_MIDV500_ARCHIVES = (
    "01_alb_id.zip",
    "02_aut_drvlic_new.zip",
    "03_aut_id_old.zip",
    "04_aut_id.zip",
    "05_aze_passport.zip",
    "06_bra_passport.zip",
    "07_chl_id.zip",
    "08_chn_homereturn.zip",
    "09_chn_id.zip",
    "10_cze_id.zip",
    "11_cze_passport.zip",
    "12_deu_drvlic_new.zip",
    "13_deu_drvlic_old.zip",
    "14_deu_id_new.zip",
    "15_deu_id_old.zip",
    "16_deu_passport_new.zip",
    "17_deu_passport_old.zip",
    "18_dza_passport.zip",
    "19_esp_drvlic.zip",
    "20_esp_id_new.zip",
    "21_esp_id_old.zip",
    "22_est_id.zip",
    "23_fin_drvlic.zip",
    "24_fin_id.zip",
    "25_grc_passport.zip",
    "26_hrv_drvlic.zip",
    "27_hrv_passport.zip",
    "28_hun_passport.zip",
    "29_irn_drvlic.zip",
    "30_ita_drvlic.zip",
    "31_jpn_drvlic.zip",
    "32_lva_passport.zip",
    "33_mac_id.zip",
    "34_mda_passport.zip",
    "35_nor_drvlic.zip",
    "36_pol_drvlic.zip",
    "37_prt_id.zip",
    "38_rou_drvlic.zip",
    "39_rus_internalpassport.zip",
    "40_srb_id.zip",
    "41_srb_passport.zip",
    "42_svk_id.zip",
    "43_tur_id.zip",
    "44_ukr_id.zip",
    "45_ukr_passport.zip",
    "46_ury_passport.zip",
    "47_usa_bordercrossing.zip",
    "48_usa_passportcard.zip",
    "49_usa_ssn82.zip",
    "50_xpo_id.zip",
)

# Download URLs for the original MIDV-500 dataset.
midv500_links = [
    "ftp://smartengines.com/midv-500/dataset/" + archive
    for archive in _MIDV500_ARCHIVES
]
# The MIDV-2019 extension reuses the same 50 archive names under a
# different FTP prefix; build the URLs the same way.
_MIDV2019_ARCHIVES = (
    "01_alb_id.zip",
    "02_aut_drvlic_new.zip",
    "03_aut_id_old.zip",
    "04_aut_id.zip",
    "05_aze_passport.zip",
    "06_bra_passport.zip",
    "07_chl_id.zip",
    "08_chn_homereturn.zip",
    "09_chn_id.zip",
    "10_cze_id.zip",
    "11_cze_passport.zip",
    "12_deu_drvlic_new.zip",
    "13_deu_drvlic_old.zip",
    "14_deu_id_new.zip",
    "15_deu_id_old.zip",
    "16_deu_passport_new.zip",
    "17_deu_passport_old.zip",
    "18_dza_passport.zip",
    "19_esp_drvlic.zip",
    "20_esp_id_new.zip",
    "21_esp_id_old.zip",
    "22_est_id.zip",
    "23_fin_drvlic.zip",
    "24_fin_id.zip",
    "25_grc_passport.zip",
    "26_hrv_drvlic.zip",
    "27_hrv_passport.zip",
    "28_hun_passport.zip",
    "29_irn_drvlic.zip",
    "30_ita_drvlic.zip",
    "31_jpn_drvlic.zip",
    "32_lva_passport.zip",
    "33_mac_id.zip",
    "34_mda_passport.zip",
    "35_nor_drvlic.zip",
    "36_pol_drvlic.zip",
    "37_prt_id.zip",
    "38_rou_drvlic.zip",
    "39_rus_internalpassport.zip",
    "40_srb_id.zip",
    "41_srb_passport.zip",
    "42_svk_id.zip",
    "43_tur_id.zip",
    "44_ukr_id.zip",
    "45_ukr_passport.zip",
    "46_ury_passport.zip",
    "47_usa_bordercrossing.zip",
    "48_usa_passportcard.zip",
    "49_usa_ssn82.zip",
    "50_xpo_id.zip",
)

# Download URLs for the MIDV-2019 extension dataset.
midv2019_links = [
    "ftp://smartengines.com/midv-500/extra/midv-2019/dataset/" + archive
    for archive in _MIDV2019_ARCHIVES
]
def download_dataset(download_dir: str, dataset_name: str = "midv500"):
    """
    Download the requested MIDV dataset(s) and unzip each archive.

    download_dir: str
        directory under which a "midv500"/"midv2019" subfolder is created
    dataset_name: str
        "midv500": https://doi.org/10.18287/2412-6179-2019-43-5-818-824
        "midv2019": https://doi.org/10.1117/12.2558438
        "all": midv500 + midv2019

    Raises:
        ValueError: if dataset_name is not one of the accepted values.
    """
    if dataset_name == "midv500":
        links_set = {
            "midv500": midv500_links,
        }
    elif dataset_name == "midv2019":
        links_set = {
            "midv2019": midv2019_links,
        }
    elif dataset_name == "all":
        links_set = {
            "midv500": midv500_links,
            "midv2019": midv2019_links,
        }
    else:
        # BUG FIX: the exception object was previously created but never
        # raised, so execution fell through and crashed with a NameError on
        # 'links_set' below; raise explicitly instead.
        raise ValueError('Invalid dataset_name, try one of "midv500", "midv2019" or "all".')

    for k, v in links_set.items():
        dst = os.path.join(download_dir, k)
        for link in v:
            print("--------------------------------------------------------------")
            # download zip file
            link = link.replace("\\", "/")  # normalize separators (windows)
            filename = link.split("/")[-1]
            print("\nDownloading:", filename)
            download(link, dst)
            print("Downloaded:", filename)
            # unzip zip file
            print("Unzipping:", filename)
            zip_path = os.path.join(dst, filename)
            unzip(zip_path, dst)
            print("Unzipped:", filename.replace(".zip", ""))
            # remove zip file to reclaim disk space
            os.remove(zip_path)
if __name__ == "__main__":
    # construct the argument parser
    ap = argparse.ArgumentParser()
    # BUG FIX: a bare positional argument is always required, so the
    # declared default was dead code; nargs="?" makes the positional
    # optional and lets default="data/" actually take effect.
    ap.add_argument(
        "download_dir",
        nargs="?",
        default="data/",
        help="Directory for MIDV-500 dataset to be downloaded.",
    )
    args = vars(ap.parse_args())
    # download dataset
    download_dataset(args["download_dir"])
| 54.127168
| 90
| 0.705788
| 1,390
| 9,364
| 4.571223
| 0.133094
| 0.11237
| 0.283286
| 0.346239
| 0.839156
| 0.82751
| 0.822002
| 0.8209
| 0.8209
| 0.81492
| 0
| 0.099589
| 0.117471
| 9,364
| 172
| 91
| 54.44186
| 0.669288
| 0.041756
| 0
| 0.047945
| 0
| 0.342466
| 0.76732
| 0.738668
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006849
| false
| 0.219178
| 0.020548
| 0
| 0.027397
| 0.034247
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 12
|
f236ea30d7814e6e5f7e36351bc7667f7fad4f04
| 125
|
py
|
Python
|
steam/utils/__init__.py
|
ivicel/steamkit-python
|
0a3f250e432cf890965db5e7245841aa512bca22
|
[
"Apache-2.0"
] | 5
|
2018-11-16T08:59:41.000Z
|
2021-04-03T05:32:18.000Z
|
steam/utils/__init__.py
|
ivicel/steamkit-python
|
0a3f250e432cf890965db5e7245841aa512bca22
|
[
"Apache-2.0"
] | null | null | null |
steam/utils/__init__.py
|
ivicel/steamkit-python
|
0a3f250e432cf890965db5e7245841aa512bca22
|
[
"Apache-2.0"
] | null | null | null |
from .util import clear_proto_mask, is_proto_msg, add_proto_mask

# BUG FIX: __all__ must contain *name strings*, not the function objects
# themselves — listing objects makes `from steam.utils import *` raise
# TypeError ("Item in __all__ must be str").
__all__ = ["clear_proto_mask", "is_proto_msg", "add_proto_mask"]
| 31.25
| 64
| 0.84
| 22
| 125
| 4.045455
| 0.454545
| 0.404494
| 0.314607
| 0.359551
| 0.808989
| 0.808989
| 0.808989
| 0.808989
| 0.808989
| 0
| 0
| 0
| 0.096
| 125
| 4
| 65
| 31.25
| 0.787611
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
4b37117f289d4054432d5850f0a931ebb4548e7d
| 4,523
|
py
|
Python
|
resources/tests/test_perf.py
|
HotStew/respa
|
04f39efb15b4f4206a122e665f8377c7198e1f25
|
[
"MIT"
] | 49
|
2015-10-21T06:25:31.000Z
|
2022-03-20T07:24:20.000Z
|
resources/tests/test_perf.py
|
HotStew/respa
|
04f39efb15b4f4206a122e665f8377c7198e1f25
|
[
"MIT"
] | 728
|
2015-06-24T13:26:54.000Z
|
2022-03-24T12:18:41.000Z
|
resources/tests/test_perf.py
|
digipointtku/respa
|
a529e0df4d3f072df7801adb5bf97a5f4abd1243
|
[
"MIT"
] | 46
|
2015-06-26T10:52:57.000Z
|
2021-12-17T09:38:25.000Z
|
from datetime import datetime
import arrow
import pytest
from django.conf import settings
from resources.models import Day, Period, Reservation, Resource, ResourceType, Unit
# Opt-in flag: the performance tests below only run when TEST_PERFORMANCE
# is set truthy in the Django settings module (defaults to disabled).
TEST_PERFORMANCE = bool(getattr(settings, "TEST_PERFORMANCE", False))
@pytest.mark.skipif(not TEST_PERFORMANCE, reason="TEST_PERFORMANCE not enabled")
@pytest.mark.django_db
def test_api_resource_scalability(api_client):
    """Time the resource-list, availability-list and single-resource API
    endpoints at 1/10/100/1000 resources, writing timings to CSV files."""
    u1 = Unit.objects.create(name='Unit 1', id='unit_1', time_zone='Europe/Helsinki')
    rt = ResourceType.objects.create(name='Type 1', id='type_1', main_type='space')
    p1 = Period.objects.create(start='2015-06-01', end='2015-09-01', unit=u1, name='')
    Day.objects.create(period=p1, weekday=0, opens='08:00', closes='22:00')
    Day.objects.create(period=p1, weekday=1, opens='08:00', closes='16:00')

    # make reservations for the whole day
    begin_res = arrow.get('2015-06-01T08:00:00Z').datetime
    end_res = arrow.get('2015-06-01T16:00:00Z').datetime

    # BUG FIX: the three CSV files were opened but never closed (leaked
    # handles, possibly unflushed buffers); context managers guarantee
    # flush-and-close even if a request raises.
    with open('perf_res_list.csv', 'w') as perf_res_list, \
            open('perf_res_avail.csv', 'w') as perf_res_avail, \
            open('perf_reservation.csv', 'w') as perf_reservation:
        perf_res_list.write('Resource listing\n')
        perf_res_list.write('resources, time (s)\n')
        perf_res_avail.write('Availability listing\n')
        perf_res_avail.write('resources, time (s)\n')
        perf_reservation.write('Single resource availability\n')
        perf_reservation.write('Total reservations, time (s)\n')

        for n in [1, 10, 100, 1000]:
            Resource.objects.all().delete()
            for i in range(n):
                resource = Resource.objects.create(name=('Resource ' + str(i)), id=('r' + str(i)), unit=u1, type=rt)
                Reservation.objects.create(resource=resource, begin=begin_res, end=end_res)

            # Time the resource listing (resource query and serialization ~ O(n))
            start = datetime.now()
            api_client.get('/v1/resource/')
            end = datetime.now()
            perf_res_list.write(str(n) + ', ' + str(end - start) + '\n')

            # Time the availability listing (resource and reservation queries,
            # serialization and filtering ~ O(n)+O(n))
            start = datetime.now()
            api_client.get('/v1/resource/?start=2015-06-01T08:00:00Z&end=2015-06-01T16:00:00Z&duration=5000')
            end = datetime.now()
            perf_res_avail.write(str(n) + ', ' + str(end - start) + '\n')

            # Time single resource availability (resource and reservation
            # queries and serialization ~ O(1))
            start = datetime.now()
            api_client.get('/v1/resource/r0?start=2015-06-01T08:00:00Z&end=2015-06-01T16:00:00Z&duration=5000')
            end = datetime.now()
            perf_reservation.write(str(n) + ', ' + str(end - start) + '\n')
@pytest.mark.skipif(not TEST_PERFORMANCE, reason="TEST_PERFORMANCE not enabled")
@pytest.mark.django_db
def test_avail_resource_scalability(client):
    """Time the general availability endpoint at 1/10/100/1000 resources,
    writing timings to perf_res_list.csv."""
    u1 = Unit.objects.create(name='Unit 1', id='unit_1', time_zone='Europe/Helsinki')
    rt = ResourceType.objects.create(name='Type 1', id='type_1', main_type='space')
    p1 = Period.objects.create(start='2015-06-01', end='2015-09-01', unit=u1, name='')
    Day.objects.create(period=p1, weekday=0, opens='08:00', closes='22:00')
    Day.objects.create(period=p1, weekday=1, opens='08:00', closes='16:00')

    # make reservations for the whole day
    begin_res = arrow.get('2015-06-01T08:00:00Z').datetime
    end_res = arrow.get('2015-06-01T16:00:00Z').datetime

    # BUG FIX: files were opened but never closed; use context managers.
    # NOTE(review): only perf_res_list receives timings here — the other
    # two files get headers only; possibly copy-paste from the test above.
    with open('perf_res_list.csv', 'w') as perf_res_list, \
            open('perf_res_avail.csv', 'w') as perf_res_avail, \
            open('perf_reservation.csv', 'w') as perf_reservation:
        perf_res_list.write('Resource listing\n')
        perf_res_list.write('resources, time (s)\n')
        perf_res_avail.write('Availability listing\n')
        perf_res_avail.write('resources, time (s)\n')
        perf_reservation.write('Single resource availability\n')
        perf_reservation.write('Total reservations, time (s)\n')

        for n in [1, 10, 100, 1000]:
            Resource.objects.all().delete()
            for i in range(n):
                resource = Resource.objects.create(name=('Resource ' + str(i)), id=('r' + str(i)), unit=u1, type=rt)
                Reservation.objects.create(resource=resource, begin=begin_res, end=end_res)

            # Time the general availability for n resources and reservations
            start = datetime.now()
            client.get('/test/availability?start_date=2015-06-01&end_date=2015-06-30')
            end = datetime.now()
            perf_res_list.write(str(n) + ', ' + str(end - start) + '\n')
| 50.255556
| 118
| 0.676984
| 659
| 4,523
| 4.520486
| 0.165402
| 0.044646
| 0.036925
| 0.032226
| 0.801947
| 0.80094
| 0.80094
| 0.793891
| 0.784156
| 0.768714
| 0
| 0.066013
| 0.16604
| 4,523
| 89
| 119
| 50.820225
| 0.723754
| 0.088879
| 0
| 0.8
| 0
| 0.028571
| 0.240399
| 0.053476
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0
| 0.071429
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b7d11da5ac6e1b0ebc4170e7d035cb4092ec2fa
| 1,377
|
py
|
Python
|
algorithms/tests/test_string_matching.py
|
t3rm1n4l/python-algorithms
|
0fbcb38b26d8690028cd5a676743950fdf3a060f
|
[
"MIT"
] | 1
|
2018-05-02T07:37:43.000Z
|
2018-05-02T07:37:43.000Z
|
algorithms/tests/test_string_matching.py
|
t3rm1n4l/python-algorithms
|
0fbcb38b26d8690028cd5a676743950fdf3a060f
|
[
"MIT"
] | null | null | null |
algorithms/tests/test_string_matching.py
|
t3rm1n4l/python-algorithms
|
0fbcb38b26d8690028cd5a676743950fdf3a060f
|
[
"MIT"
] | null | null | null |
import unittest
import string_matching
class StringMatchingTest(unittest.TestCase):
    """Each test runs one matcher against a text that contains the pattern
    at offsets [0, 5, 7], then against a pattern with no matches ([]).

    FIX: the deprecated assertEquals alias (removed in Python 3.12) is
    replaced by assertEqual throughout.
    """

    def test_string_matching_naive(self):
        t = 'ababbababa'
        s = 'aba'
        self.assertEqual(string_matching.string_matching_naive(t, s), [0, 5, 7])
        t = 'ababbababa'
        s = 'abbb'
        self.assertEqual(string_matching.string_matching_naive(t, s), [])

    def test_string_matching_rabin_karp(self):
        t = 'ababbababa'
        s = 'aba'
        self.assertEqual(string_matching.string_matching_rabin_karp(t, s), [0, 5, 7])
        t = 'ababbababa'
        s = 'abbb'
        self.assertEqual(string_matching.string_matching_rabin_karp(t, s), [])

    def test_string_matching_knuth_morris_pratt(self):
        t = 'ababbababa'
        s = 'aba'
        self.assertEqual(string_matching.string_matching_knuth_morris_pratt(t, s), [0, 5, 7])
        t = 'ababbababa'
        s = 'abbb'
        self.assertEqual(string_matching.string_matching_knuth_morris_pratt(t, s), [])

    def test_string_matching_boyer_moore_horspool(self):
        t = 'ababbababa'
        s = 'aba'
        self.assertEqual(string_matching.string_matching_boyer_moore_horspool(t, s), [0, 5, 7])
        t = 'ababbababa'
        s = 'abbb'
        self.assertEqual(string_matching.string_matching_boyer_moore_horspool(t, s), [])
if __name__ == '__main__':
unittest.main()
| 33.585366
| 96
| 0.655773
| 169
| 1,377
| 4.988166
| 0.183432
| 0.348754
| 0.113879
| 0.284698
| 0.845789
| 0.805457
| 0.730724
| 0.730724
| 0.730724
| 0.716489
| 0
| 0.011289
| 0.228032
| 1,377
| 40
| 97
| 34.425
| 0.78175
| 0
| 0
| 0.484848
| 0
| 0
| 0.084302
| 0
| 0
| 0
| 0
| 0
| 0.242424
| 1
| 0.121212
| false
| 0
| 0.060606
| 0
| 0.212121
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4b90f733d945576384389e3af5e8eb7b26b24785
| 137
|
py
|
Python
|
gan_provider.py
|
jiameng1010/pointNet
|
17d230f46f64136baba2c3d6cb7f05ab4bbb9f31
|
[
"MIT"
] | null | null | null |
gan_provider.py
|
jiameng1010/pointNet
|
17d230f46f64136baba2c3d6cb7f05ab4bbb9f31
|
[
"MIT"
] | null | null | null |
gan_provider.py
|
jiameng1010/pointNet
|
17d230f46f64136baba2c3d6cb7f05ab4bbb9f31
|
[
"MIT"
] | 1
|
2019-02-03T12:19:36.000Z
|
2019-02-03T12:19:36.000Z
|
from tensorflow.contrib.slim.python.slim.data import data_provider
from tensorflow.contrib.slim.python.slim.data import parallel_reader
| 34.25
| 68
| 0.861314
| 20
| 137
| 5.8
| 0.5
| 0.241379
| 0.362069
| 0.431034
| 0.775862
| 0.775862
| 0.775862
| 0.775862
| 0
| 0
| 0
| 0
| 0.065693
| 137
| 3
| 69
| 45.666667
| 0.90625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
4bbd0476d4a8b8dde9a872c84a83e121621a1703
| 16,649
|
py
|
Python
|
GeneratorTest.py
|
Autio/swb_datascraping
|
ef31fd89c68d86849342495b79985572d0f2fc61
|
[
"MIT"
] | null | null | null |
GeneratorTest.py
|
Autio/swb_datascraping
|
ef31fd89c68d86849342495b79985572d0f2fc61
|
[
"MIT"
] | null | null | null |
GeneratorTest.py
|
Autio/swb_datascraping
|
ef31fd89c68d86849342495b79985572d0f2fc61
|
[
"MIT"
] | null | null | null |
__author__ = 'petriau'
import requests # for HTTP requests
from bs4 import BeautifulSoup # for HTML parsing
url_SBM_FinanceProgress = 'http://sbm.gov.in/sbmreport/Report/Financial/SBM_StateReleaseAllocationincludingUnapproved.aspx'
# Function to return HTML parsed with BeautifulSoup from a POST request URL and parameters.
def parsePOSTResponse(URL, parameters='', timeout=None):
    """Return HTML parsed with BeautifulSoup from a POST request URL and parameters.

    Parameters
    ----------
    URL : str
        Endpoint to POST to.
    parameters : dict or str
        Form data sent as the request body.
    timeout : float or None
        Seconds to wait for the server.  Default ``None`` preserves the
        original wait-forever behaviour; pass a number to avoid hanging
        indefinitely on an unresponsive host.

    Returns the parsed document on HTTP 200, otherwise '' (the empty
    string is kept, rather than None, for backward compatibility with
    existing callers that truth-test the result).
    """
    responseHTMLParsed = ''
    r = requests.post(URL, data=parameters, timeout=timeout)
    if r.status_code == 200:
        responseHTMLParsed = BeautifulSoup(r.content, 'html.parser')
    return responseHTMLParsed
listTest = [['__EVENTARGUMENT',''],['__EVENTTARGET', 'ctl00$ContentPlaceHolder1$rptr_state$ctl03$lnkbtn_stName'],['__EVENTVALIDATION',"/wEWTAL2js/IBwLq6fiEBwK4qJKGBgL7uLfDBQLMho26CAKkvMv0BAKrp/OzCAKzzOWcCQLfxNm+CQLZ25fbDALc9b7CDALYxrzSBgLK5tedAQLJrc6KBwLD2Nb1DwKAz9S2BQLD2JrzAgKAz/jyBAKUlKjOAgKvk9PyBQKUlOzLBQKvk5ewAQKNhuujBAK2ocCIDQKNhq+hBwK2oaT3BwLW5PDOBwLdiLPJCQLW5LTMCgLdiPeGBQLPqct8AqCWu5oDAs+pj/oDAqCW/9cOArTAhacLArv/sNsBArTAyaQOArv/1JcBApnX36oFAtbopuwKApnXo6gIAtbo6qkGAv7tmdUPAvHRnK0JAv7t3dICAvHRgJwEAtuMv40FAoTehsMOAtuMg4sIAoTeyoAKAtTR8eIFAquz5dgPAtTRhc0LAquz+e4CAs3DtJABAoKu9fINAs3DyPoGAoKuibEHAp7/hZEKAuGFkd0CAp7/mfsPAuGFpfMFApfxyL4FAriAoXcCl/HcqAsCuIC1tQoC4M/OkQ8Cv46O1Q0C4M/i+wQCv46iawLZlNH1CQKqnLrXDQLZlJXzDAKqnM5tAr6ri/gNAsWF0MkLUJ4OhBgatkYSQhamBAvcsSVIgC8="],
['__VIEWSTATE',"/wEPDwUKMTQwNTE3ODMyMg9kFgJmD2QWAgIDD2QWBAIfDw8WBB4EVGV4dAVMPHNwYW4gY2xhc3M9ImdseXBoaWNvbiBnbHlwaGljb24tY2lyY2xlLWFycm93LWxlZnQiPjwvc3Bhbj4gQmFjayB0byBQcmV2aW91cx4HVmlzaWJsZWcWAh4Hb25jbGljawUoamF2YXNjcmlwdDpoaXN0b3J5LmJhY2soKTsgcmV0dXJuIGZhbHNlO2QCIQ9kFgICAQ9kFggCAw8PFgIfAAUIKENlbnRlcilkZAIFDw8WAh8ABQsoMjAxNi0yMDE3KWRkAgcPFgIfAWgWBAIBDxYCHwFoFgQCAw8QZGQWAWZkAgcPEA8WBh4NRGF0YVRleHRGaWVsZAUHRmluWWVhch4ORGF0YVZhbHVlRmllbGQFB0ZpblllYXIeC18hRGF0YUJvdW5kZ2QQFQIKLS1TZWxlY3QtLQkyMDE2LTIwMTcVAgItMgkyMDE2LTIwMTcUKwMCZ2cWAQIBZAIDD2QWAgIBDw8WBB8ABRRSZWNvcmQgTm90IEZvdW5kICEhIR8BaGRkAgkPFgIeC18hSXRlbUNvdW50AiAWQgIBD2QWBmYPFQEBMWQCAQ8PFgIfAAUMQSAmIE4gSWxhbmRzZGQCBA8VCgYzNDAuMDAEMC4wMAQwLjAwBjM0MC4wMAQwLjAwBTEzLjU5BTEzLjU5BDQuMDAGMzQwLjAwBjMyNi40MWQCAg9kFgZmDxUBATJkAgEPDxYCHwAFDkFuZGhyYSBQcmFkZXNoZGQCBA8VCgc4NTk2LjY5BzY3NzIuODUEMS4xNAgxNTM3MC42OAc1NjQ5LjkzBzMzNDMuNjEHODk5My41NAU1OC41MQc5NzIwLjc1BzYzNzcuMTRkAgMPZBYGZg8VAQEzZAIBDw8WAh8ABRFBcnVuYWNoYWwgUHJhZGVzaGRkAgQPFQoHMTQ2NS44OAY5NjguNTEEMC4wMAcyNDM0LjM5BDAuMDAGMTA4LjAzBjEwOC4wMwQ0LjQ0BzI0MzQuMzkHMjMyNi4zNWQCBA9kFgZmDxUBATRkAgEPDxYCHwAFBUFzc2FtZGQCBA8VCggxNjExMC43OAQwLjAwBDAuMDAIMTYxMTAuNzgGNjg2LjE5BjkxNi4yNwcxNjAyLjQ2BDkuOTUIMTU0MjQuNjAIMTQ1MDguMzJkAgUPZBYGZg8VAQE1ZAIBDw8WAh8ABQVCaWhhcmRkAgQPFQoHNDIwMC4zNQgxMzE4Ni4zNwQwLjAwCDE3Mzg2LjcyBjY4Ni45OAcxMjI2LjgwBzE5MTMuNzgFMTEuMDEIMTY2OTkuNzQIMTU0NzIuOTRkAgYPZBYGZg8VAQE2ZAIBDw8WAh8ABQxDaGhhdHRpc2dhcmhkZAIEDxUKCC01OTYyLjUxBzk5NDcuNTcEMC4wMAczOTg1LjA2BjU3MS4xNgcxODY5LjE5BzI0NDAuMzQFNjEuMjQHMzQxMy45MQcxNTQ0LjcyZAIHD2QWBmYPFQEBN2QCAQ8PFgIfAAUMRCAmIE4gSGF2ZWxpZGQCBA8VCgQxLjQ4BDAuMDAEMC4wMAQxLjQ4BDAuMDAEMC4wMAQwLjAwBDAuMDAEMS40OAQxLjQ4ZAIID2QWBmYPFQEBOGQCAQ8PFgIfAAUDR29hZGQCBA8VCgctMzMzLjk1BDAuMDAEMC4wMActMzMzLjk1BDAuMDAHMjA5NC40OAcyMDk0LjQ4BDAuMDAHLTMzMy45NQgtMjQyOC40M2QCCQ9kFgZmDxUBATlkAgEPDxYCHwAFB0d1amFyYXRkZAIEDxUKCC00Njg4LjA0CDI4MDQ5LjI2BDAuMjMIMjMzNjEuNDUHMjAwNS4zNgc0MTc5LjAzBzYxODQuMzkFMjYuNDcIMjEzNTYuMDgIMTcxNzcuMDZkAgoPZBYGZg8VAQIxMGQCAQ8PFgIfAAUHSGFyeWFuYWRkAgQPFQoG
Nzc0LjQ5BzY4NzkuMDcEMi4zNQc3NjU1LjkyBjIwOC40MgU5MS42MQYzMDAuMDMEMy45Mgc3NDQ3LjUwBzczNTUuODlkAgsPZBYGZg8VAQIxMWQCAQ8PFgIfAAUQSGltYWNoYWwgUHJhZGVzaGRkAgQPFQoHNTI4My4yOAQwLjAwBTI0LjAzBzUzMDcuMzEGMzEzLjY0BjY2Ni41NgY5ODAuMjAFMTguNDcHNDk5My42Nwc0MzI3LjExZAIMD2QWBmYPFQECMTJkAgEPDxYCHwAFD0phbW11ICYgS2FzaG1pcmRkAgQPFQoHNTM5OS4zNwYyMjkuOTAEMC4wMAc1NjI5LjI3BjEwMS43MQU1MS44NQYxNTMuNTYEMi43Mwc1NTI3LjU1BzU0NzUuNzBkAg0PZBYGZg8VAQIxM2QCAQ8PFgIfAAUJSmhhcmtoYW5kZGQCBA8VCgktMTIyNzYuNjMIMTAzNTguOTYENC4xMggtMTkxMy41NQcxMzkwLjc2BzIzNTcuMjIHMzc0Ny45OAQwLjAwCC0zMzA0LjMxCC01NjYxLjUyZAIOD2QWBmYPFQECMTRkAgEPDxYCHwAFCUthcm5hdGFrYWRkAgQPFQoILTUwNDAuNjQIMTI2NzEuNTAEMC4wMAc3NjMwLjg2Bjk0OS40MwczMzA1LjYyBzQyNTUuMDUFNTUuNzYHNjY4MS40MwczMzc1LjgxZAIPD2QWBmYPFQECMTVkAgEPDxYCHwAFBktlcmFsYWRkAgQPFQoHMjg5MC45MgQwLjAwBDIuODIHMjg5My43NAYxMDcuNjkENS4xMQYxMTIuODAEMy45MAcyNzg2LjA1BzI3ODAuOTRkAhAPZBYGZg8VAQIxNmQCAQ8PFgIfAAUOTWFkaHlhIFByYWRlc2hkZAIEDxUKCS0xNTYzMy43NAgzNDIyMy41MwUyNS4wMAgxODYxNC43OQc5MzYwLjU0BzM0NzIuOTUIMTI4MzMuNDkFNjguOTQHOTI1NC4yNAc1NzgxLjI5ZAIRD2QWBmYPFQECMTdkAgEPDxYCHwAFC01haGFyYXNodHJhZGQCBA8VCggtNDMzMy4xNwgyNjQ0Ny4wOQQwLjAwCDIyMTEzLjkyBjMyNy42OAczNDg5LjAxBzM4MTYuNjkFMTcuMjYIMjE3ODYuMjMIMTgyOTcuMjNkAhIPZBYGZg8VAQIxOGQCAQ8PFgIfAAUHTWFuaXB1cmRkAgQPFQoHLTQ2Ni4yOQcyNzI3LjUwBDAuMDAHMjI2MS4yMQQwLjAwBjE1NS42MwYxNTUuNjMENi44OAcyMjYxLjIxBzIxMDUuNThkAhMPZBYGZg8VAQIxOWQCAQ8PFgIfAAUJTWVnaGFsYXlhZGQCBA8VCgcxNzI3LjY3BzQxMjIuMjQEMC4wMAc1ODQ5LjkxBjIyOS42MAYxMDguMjUGMzM3Ljg1BDUuNzgHNTYyMC4zMQc1NTEyLjA2ZAIUD2QWBmYPFQECMjBkAgEPDxYCHwAFB01pem9yYW1kZAIEDxUKBjM2NC4zMwQwLjAwBDAuMDAGMzY0LjMzBTk1LjExBTczLjgyBjE2OC45MwU0Ni4zNwYyNjkuMjMGMTk1LjQwZAIVD2QWBmYPFQECMjFkAgEPDxYCHwAFCE5hZ2FsYW5kZGQCBA8VCgYzMDIuMDMEMC4wMAQ5Ljg3BjMxMS45MAYxNzMuNDMEMi4yNwYxNzUuNzAFNTYuMzMGMTM4LjQ3BjEzNi4yMGQCFg9kFgZmDxUBAjIyZAIBDw8WAh8ABQZPZGlzaGFkZAIEDxUKCS01MDYzMS40Nwg0NTg1Ni42MQQwLjAwCC00Nzc0Ljg2Bzk1MTAuMzgHMjI4MC4zNQgxMTc5MC43MwQwLjAwCS0xNDI4NS4yNAktMTY1NjUuNTlkAhcPZBYGZg8VAQIyM2QCAQ8PFgIfAAUKUHVkdWNoZXJyeWRkAgQPFQoGNjYzLjEyBDAuMDAEMC4wMAY2
NjMuMTIEMC4wMAQwLjAwBDAuMDAEMC4wMAY2NjMuMTIGNjYzLjEyZAIYD2QWBmYPFQECMjRkAgEPDxYCHwAFBlB1bmphYmRkAgQPFQoILTE2NTUuMjkHMjQ4Mi44NQQwLjAwBjgyNy41NgYxNTQuOTIGNTE4LjkwBjY3My44MwU4MS40MgY2NzIuNjMGMTUzLjczZAIZD2QWBmYPFQECMjVkAgEPDxYCHwAFCVJhamFzdGhhbmRkAgQPFQoJLTMwNTk3LjUwCDYyNzMwLjA1BDAuMDAIMzIxMzIuNTUHNjQxNC45Mwc1ODA4LjUyCDEyMjIzLjQ1BTM4LjA0CDI1NzE3LjYyCDE5OTA5LjEwZAIaD2QWBmYPFQECMjZkAgEPDxYCHwAFBlNpa2tpbWRkAgQPFQoGNTE1LjM5BjQ4MC45NgQwLjAwBjk5Ni4zNQQwLjAwBDAuMDAEMC4wMAQwLjAwBjk5Ni4zNQY5OTYuMzVkAhsPZBYGZg8VAQIyN2QCAQ8PFgIfAAUKVGFtaWwgTmFkdWRkAgQPFQoJLTI0MTEwLjAxCDI2ODUwLjk0BDAuNjIHMjc0MS41NgY0NzguMTEGMTU3Ljg5BjYzNi4wMQUyMy4yMAcyMjYzLjQ0BzIxMDUuNTVkAhwPZBYGZg8VAQIyOGQCAQ8PFgIfAAUJVGVsYW5nYW5hZGQCBA8VCgc1ODE0LjI1BDAuMDAEMC4wMAc1ODE0LjI1BjY1Ni43OAc0NjgwLjI0BzUzMzcuMDIFOTEuNzkHNTE1Ny40NwY0NzcuMjNkAh0PZBYGZg8VAQIyOWQCAQ8PFgIfAAUHVHJpcHVyYWRkAgQPFQoHMzYwNy40OAQwLjAwBDAuMDAHMzYwNy40OAU0MC4yMgYxMjguOTEGMTY5LjEzBDQuNjkHMzU2Ny4yNgczNDM4LjM2ZAIeD2QWBmYPFQECMzBkAgEPDxYCHwAFDVV0dGFyIFByYWRlc2hkZAIEDxUKCDI2OTIyLjIyCDE3ODY3LjQ0BDYuNjYINDQ3OTYuMzIHMzg1Ni44MAczNTE4LjMwBzczNzUuMTAFMTYuNDYINDA5MzkuNTIIMzc0MjEuMjFkAh8PZBYGZg8VAQIzMWQCAQ8PFgIfAAULVXR0YXJha2hhbmRkZAIEDxUKCC0xNjU4LjI3Bzg1MjkuMTMEMC4wMAc2ODcwLjg2BzEyMjAuMDkGNjQwLjc5BzE4NjAuODgFMjcuMDgHNTY1MC43Nwc1MDA5Ljk3ZAIgD2QWBmYPFQECMzJkAgEPDxYCHwAFC1dlc3QgQmVuZ2FsZGQCBA8VCgktMTYyMTkuNjcIMzI4NzUuNjAEMC4wMAgxNjY1NS45MwcxNzI3LjgxBzY2NDYuMTkHODM3NC4wMAU1MC4yOAgxNDkyOC4xMgc4MjgxLjkzZAIhD2QWAgICDxUKCS04ODYyNy40NQkzNTQyNTcuOTMFNzYuODQJMjY1NzA3LjM0CDQ2OTE3LjY3CDUxOTEwLjk5CDk4ODI4LjY3BTM3LjE5CTIxODc4OS42NAkxNjY4NzguNjRkGAEFHl9fQ29udHJvbHNSZXF1aXJlUG9zdEJhY2tLZXlfXxYDBQ9jdGwwMCRpY29uX3dvcmQFEGN0bDAwJGljb25fZXhjZWwFEmN0bDAwJGljb25fcHJpbnRlcqLkin/PLgDvwcsQ6/a18eF5HbFe"]
]
paramDictionary = {key: str(value) for key, value in listTest}
def merge_two_dicts(x, y):
    '''Given two dicts, merge them into a new dict as a shallow copy.

    Neither input is modified; on key collisions the value from ``y`` wins.
    '''
    merged = dict(x)
    merged.update(y)
    return merged
postParams = {
# '__EVENTARGUMENT': '',
# '__EVENTTARGET': 'ctl00$ContentPlaceHolder1$rptr_state$ctl03$lnkbtn_stName',
# '__EVENTVALIDATION': "/wEWTAL2js/IBwLq6fiEBwK4qJKGBgL7uLfDBQLMho26CAKkvMv0BAKrp/OzCAKzzOWcCQLfxNm+CQLZ25fbDALc9b7CDALYxrzSBgLK5tedAQLJrc6KBwLD2Nb1DwKAz9S2BQLD2JrzAgKAz/jyBAKUlKjOAgKvk9PyBQKUlOzLBQKvk5ewAQKNhuujBAK2ocCIDQKNhq+hBwK2oaT3BwLW5PDOBwLdiLPJCQLW5LTMCgLdiPeGBQLPqct8AqCWu5oDAs+pj/oDAqCW/9cOArTAhacLArv/sNsBArTAyaQOArv/1JcBApnX36oFAtbopuwKApnXo6gIAtbo6qkGAv7tmdUPAvHRnK0JAv7t3dICAvHRgJwEAtuMv40FAoTehsMOAtuMg4sIAoTeyoAKAtTR8eIFAquz5dgPAtTRhc0LAquz+e4CAs3DtJABAoKu9fINAs3DyPoGAoKuibEHAp7/hZEKAuGFkd0CAp7/mfsPAuGFpfMFApfxyL4FAriAoXcCl/HcqAsCuIC1tQoC4M/OkQ8Cv46O1Q0C4M/i+wQCv46iawLZlNH1CQKqnLrXDQLZlJXzDAKqnM5tAr6ri/gNAsWF0MkLUJ4OhBgatkYSQhamBAvcsSVIgC8=",
# '__VIEWSTATE': "/wEPDwUKMTQwNTE3ODMyMg9kFgJmD2QWAgIDD2QWBAIfDw8WBB4EVGV4dAVMPHNwYW4gY2xhc3M9ImdseXBoaWNvbiBnbHlwaGljb24tY2lyY2xlLWFycm93LWxlZnQiPjwvc3Bhbj4gQmFjayB0byBQcmV2aW91cx4HVmlzaWJsZWcWAh4Hb25jbGljawUoamF2YXNjcmlwdDpoaXN0b3J5LmJhY2soKTsgcmV0dXJuIGZhbHNlO2QCIQ9kFgICAQ9kFggCAw8PFgIfAAUIKENlbnRlcilkZAIFDw8WAh8ABQsoMjAxNi0yMDE3KWRkAgcPFgIfAWgWBAIBDxYCHwFoFgQCAw8QZGQWAWZkAgcPEA8WBh4NRGF0YVRleHRGaWVsZAUHRmluWWVhch4ORGF0YVZhbHVlRmllbGQFB0ZpblllYXIeC18hRGF0YUJvdW5kZ2QQFQIKLS1TZWxlY3QtLQkyMDE2LTIwMTcVAgItMgkyMDE2LTIwMTcUKwMCZ2cWAQIBZAIDD2QWAgIBDw8WBB8ABRRSZWNvcmQgTm90IEZvdW5kICEhIR8BaGRkAgkPFgIeC18hSXRlbUNvdW50AiAWQgIBD2QWBmYPFQEBMWQCAQ8PFgIfAAUMQSAmIE4gSWxhbmRzZGQCBA8VCgYzNDAuMDAEMC4wMAQwLjAwBjM0MC4wMAQwLjAwBTEzLjU5BTEzLjU5BDQuMDAGMzQwLjAwBjMyNi40MWQCAg9kFgZmDxUBATJkAgEPDxYCHwAFDkFuZGhyYSBQcmFkZXNoZGQCBA8VCgc4NTk2LjY5BzY3NzIuODUEMS4xNAgxNTM3MC42OAc1NjQ5LjkzBzMzNDMuNjEHODk5My41NAU1OC41MQc5NzIwLjc1BzYzNzcuMTRkAgMPZBYGZg8VAQEzZAIBDw8WAh8ABRFBcnVuYWNoYWwgUHJhZGVzaGRkAgQPFQoHMTQ2NS44OAY5NjguNTEEMC4wMAcyNDM0LjM5BDAuMDAGMTA4LjAzBjEwOC4wMwQ0LjQ0BzI0MzQuMzkHMjMyNi4zNWQCBA9kFgZmDxUBATRkAgEPDxYCHwAFBUFzc2FtZGQCBA8VCggxNjExMC43OAQwLjAwBDAuMDAIMTYxMTAuNzgGNjg2LjE5BjkxNi4yNwcxNjAyLjQ2BDkuOTUIMTU0MjQuNjAIMTQ1MDguMzJkAgUPZBYGZg8VAQE1ZAIBDw8WAh8ABQVCaWhhcmRkAgQPFQoHNDIwMC4zNQgxMzE4Ni4zNwQwLjAwCDE3Mzg2LjcyBjY4Ni45OAcxMjI2LjgwBzE5MTMuNzgFMTEuMDEIMTY2OTkuNzQIMTU0NzIuOTRkAgYPZBYGZg8VAQE2ZAIBDw8WAh8ABQxDaGhhdHRpc2dhcmhkZAIEDxUKCC01OTYyLjUxBzk5NDcuNTcEMC4wMAczOTg1LjA2BjU3MS4xNgcxODY5LjE5BzI0NDAuMzQFNjEuMjQHMzQxMy45MQcxNTQ0LjcyZAIHD2QWBmYPFQEBN2QCAQ8PFgIfAAUMRCAmIE4gSGF2ZWxpZGQCBA8VCgQxLjQ4BDAuMDAEMC4wMAQxLjQ4BDAuMDAEMC4wMAQwLjAwBDAuMDAEMS40OAQxLjQ4ZAIID2QWBmYPFQEBOGQCAQ8PFgIfAAUDR29hZGQCBA8VCgctMzMzLjk1BDAuMDAEMC4wMActMzMzLjk1BDAuMDAHMjA5NC40OAcyMDk0LjQ4BDAuMDAHLTMzMy45NQgtMjQyOC40M2QCCQ9kFgZmDxUBATlkAgEPDxYCHwAFB0d1amFyYXRkZAIEDxUKCC00Njg4LjA0CDI4MDQ5LjI2BDAuMjMIMjMzNjEuNDUHMjAwNS4zNgc0MTc5LjAzBzYxODQuMzkFMjYuNDcIMjEzNTYuMDgIMTcxNzcuMDZkAgoPZBYGZg8VAQIxMGQCAQ8PFgIfAAUHSGFyeWFuYWRkAgQPFQ
oGNzc0LjQ5BzY4NzkuMDcEMi4zNQc3NjU1LjkyBjIwOC40MgU5MS42MQYzMDAuMDMEMy45Mgc3NDQ3LjUwBzczNTUuODlkAgsPZBYGZg8VAQIxMWQCAQ8PFgIfAAUQSGltYWNoYWwgUHJhZGVzaGRkAgQPFQoHNTI4My4yOAQwLjAwBTI0LjAzBzUzMDcuMzEGMzEzLjY0BjY2Ni41NgY5ODAuMjAFMTguNDcHNDk5My42Nwc0MzI3LjExZAIMD2QWBmYPFQECMTJkAgEPDxYCHwAFD0phbW11ICYgS2FzaG1pcmRkAgQPFQoHNTM5OS4zNwYyMjkuOTAEMC4wMAc1NjI5LjI3BjEwMS43MQU1MS44NQYxNTMuNTYEMi43Mwc1NTI3LjU1BzU0NzUuNzBkAg0PZBYGZg8VAQIxM2QCAQ8PFgIfAAUJSmhhcmtoYW5kZGQCBA8VCgktMTIyNzYuNjMIMTAzNTguOTYENC4xMggtMTkxMy41NQcxMzkwLjc2BzIzNTcuMjIHMzc0Ny45OAQwLjAwCC0zMzA0LjMxCC01NjYxLjUyZAIOD2QWBmYPFQECMTRkAgEPDxYCHwAFCUthcm5hdGFrYWRkAgQPFQoILTUwNDAuNjQIMTI2NzEuNTAEMC4wMAc3NjMwLjg2Bjk0OS40MwczMzA1LjYyBzQyNTUuMDUFNTUuNzYHNjY4MS40MwczMzc1LjgxZAIPD2QWBmYPFQECMTVkAgEPDxYCHwAFBktlcmFsYWRkAgQPFQoHMjg5MC45MgQwLjAwBDIuODIHMjg5My43NAYxMDcuNjkENS4xMQYxMTIuODAEMy45MAcyNzg2LjA1BzI3ODAuOTRkAhAPZBYGZg8VAQIxNmQCAQ8PFgIfAAUOTWFkaHlhIFByYWRlc2hkZAIEDxUKCS0xNTYzMy43NAgzNDIyMy41MwUyNS4wMAgxODYxNC43OQc5MzYwLjU0BzM0NzIuOTUIMTI4MzMuNDkFNjguOTQHOTI1NC4yNAc1NzgxLjI5ZAIRD2QWBmYPFQECMTdkAgEPDxYCHwAFC01haGFyYXNodHJhZGQCBA8VCggtNDMzMy4xNwgyNjQ0Ny4wOQQwLjAwCDIyMTEzLjkyBjMyNy42OAczNDg5LjAxBzM4MTYuNjkFMTcuMjYIMjE3ODYuMjMIMTgyOTcuMjNkAhIPZBYGZg8VAQIxOGQCAQ8PFgIfAAUHTWFuaXB1cmRkAgQPFQoHLTQ2Ni4yOQcyNzI3LjUwBDAuMDAHMjI2MS4yMQQwLjAwBjE1NS42MwYxNTUuNjMENi44OAcyMjYxLjIxBzIxMDUuNThkAhMPZBYGZg8VAQIxOWQCAQ8PFgIfAAUJTWVnaGFsYXlhZGQCBA8VCgcxNzI3LjY3BzQxMjIuMjQEMC4wMAc1ODQ5LjkxBjIyOS42MAYxMDguMjUGMzM3Ljg1BDUuNzgHNTYyMC4zMQc1NTEyLjA2ZAIUD2QWBmYPFQECMjBkAgEPDxYCHwAFB01pem9yYW1kZAIEDxUKBjM2NC4zMwQwLjAwBDAuMDAGMzY0LjMzBTk1LjExBTczLjgyBjE2OC45MwU0Ni4zNwYyNjkuMjMGMTk1LjQwZAIVD2QWBmYPFQECMjFkAgEPDxYCHwAFCE5hZ2FsYW5kZGQCBA8VCgYzMDIuMDMEMC4wMAQ5Ljg3BjMxMS45MAYxNzMuNDMEMi4yNwYxNzUuNzAFNTYuMzMGMTM4LjQ3BjEzNi4yMGQCFg9kFgZmDxUBAjIyZAIBDw8WAh8ABQZPZGlzaGFkZAIEDxUKCS01MDYzMS40Nwg0NTg1Ni42MQQwLjAwCC00Nzc0Ljg2Bzk1MTAuMzgHMjI4MC4zNQgxMTc5MC43MwQwLjAwCS0xNDI4NS4yNAktMTY1NjUuNTlkAhcPZBYGZg8VAQIyM2QCAQ8PFgIfAAUKUHVkdWNoZXJyeWRkAgQPFQoGNjYzLjEyBDAuMDAEMC4wMA
Y2NjMuMTIEMC4wMAQwLjAwBDAuMDAEMC4wMAY2NjMuMTIGNjYzLjEyZAIYD2QWBmYPFQECMjRkAgEPDxYCHwAFBlB1bmphYmRkAgQPFQoILTE2NTUuMjkHMjQ4Mi44NQQwLjAwBjgyNy41NgYxNTQuOTIGNTE4LjkwBjY3My44MwU4MS40MgY2NzIuNjMGMTUzLjczZAIZD2QWBmYPFQECMjVkAgEPDxYCHwAFCVJhamFzdGhhbmRkAgQPFQoJLTMwNTk3LjUwCDYyNzMwLjA1BDAuMDAIMzIxMzIuNTUHNjQxNC45Mwc1ODA4LjUyCDEyMjIzLjQ1BTM4LjA0CDI1NzE3LjYyCDE5OTA5LjEwZAIaD2QWBmYPFQECMjZkAgEPDxYCHwAFBlNpa2tpbWRkAgQPFQoGNTE1LjM5BjQ4MC45NgQwLjAwBjk5Ni4zNQQwLjAwBDAuMDAEMC4wMAQwLjAwBjk5Ni4zNQY5OTYuMzVkAhsPZBYGZg8VAQIyN2QCAQ8PFgIfAAUKVGFtaWwgTmFkdWRkAgQPFQoJLTI0MTEwLjAxCDI2ODUwLjk0BDAuNjIHMjc0MS41NgY0NzguMTEGMTU3Ljg5BjYzNi4wMQUyMy4yMAcyMjYzLjQ0BzIxMDUuNTVkAhwPZBYGZg8VAQIyOGQCAQ8PFgIfAAUJVGVsYW5nYW5hZGQCBA8VCgc1ODE0LjI1BDAuMDAEMC4wMAc1ODE0LjI1BjY1Ni43OAc0NjgwLjI0BzUzMzcuMDIFOTEuNzkHNTE1Ny40NwY0NzcuMjNkAh0PZBYGZg8VAQIyOWQCAQ8PFgIfAAUHVHJpcHVyYWRkAgQPFQoHMzYwNy40OAQwLjAwBDAuMDAHMzYwNy40OAU0MC4yMgYxMjguOTEGMTY5LjEzBDQuNjkHMzU2Ny4yNgczNDM4LjM2ZAIeD2QWBmYPFQECMzBkAgEPDxYCHwAFDVV0dGFyIFByYWRlc2hkZAIEDxUKCDI2OTIyLjIyCDE3ODY3LjQ0BDYuNjYINDQ3OTYuMzIHMzg1Ni44MAczNTE4LjMwBzczNzUuMTAFMTYuNDYINDA5MzkuNTIIMzc0MjEuMjFkAh8PZBYGZg8VAQIzMWQCAQ8PFgIfAAULVXR0YXJha2hhbmRkZAIEDxUKCC0xNjU4LjI3Bzg1MjkuMTMEMC4wMAc2ODcwLjg2BzEyMjAuMDkGNjQwLjc5BzE4NjAuODgFMjcuMDgHNTY1MC43Nwc1MDA5Ljk3ZAIgD2QWBmYPFQECMzJkAgEPDxYCHwAFC1dlc3QgQmVuZ2FsZGQCBA8VCgktMTYyMTkuNjcIMzI4NzUuNjAEMC4wMAgxNjY1NS45MwcxNzI3LjgxBzY2NDYuMTkHODM3NC4wMAU1MC4yOAgxNDkyOC4xMgc4MjgxLjkzZAIhD2QWAgICDxUKCS04ODYyNy40NQkzNTQyNTcuOTMFNzYuODQJMjY1NzA3LjM0CDQ2OTE3LjY3CDUxOTEwLjk5CDk4ODI4LjY3BTM3LjE5CTIxODc4OS42NAkxNjY4NzguNjRkGAEFHl9fQ29udHJvbHNSZXF1aXJlUG9zdEJhY2tLZXlfXxYDBQ9jdGwwMCRpY29uX3dvcmQFEGN0bDAwJGljb25fZXhjZWwFEmN0bDAwJGljb25fcHJpbnRlcqLkin/PLgDvwcsQ6/a18eF5HbFe",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl01$hfd_StateId':"26",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl02$hfd_StateId':"1",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl03$hfd_StateId':"2",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl04$hfd_StateId':"3",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl05$hfd_StateId':"4",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl06$hfd_StateId':"34",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl07$hfd_StateId':"28",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl08$hfd_StateId':"5",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl09$hfd_StateId':"6",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl10$hfd_StateId':"7",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl11$hfd_StateId':"8",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl12$hfd_StateId':"9",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl13$hfd_StateId':"35",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl14$hfd_StateId':"10",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl15$hfd_StateId':"11",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl16$hfd_StateId':"12",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl17$hfd_StateId':"13",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl18$hfd_StateId':"14",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl19$hfd_StateId':"15",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl20$hfd_StateId':"16",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl21$hfd_StateId':"17",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl22$hfd_StateId':"18",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl23$hfd_StateId':"32",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl24$hfd_StateId':"19",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl25$hfd_StateId':"20",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl26$hfd_StateId':"21",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl27$hfd_StateId':"22",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl28$hfd_StateId':"36",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl29$hfd_StateId':"23",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl30$hfd_StateId':"24",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl31$hfd_StateId':"33",
'ctl00$ContentPlaceHolder1$rptr_cen$ctl32$hfd_StateId':"25"
}
# Merge the scraped hidden-form fields over the static POST parameters
# (values from paramDictionary win on key collisions), then fetch and
# dump the resulting page.
p = merge_two_dicts(postParams, paramDictionary)
componentPage = parsePOSTResponse(url_SBM_FinanceProgress, p)
print(componentPage)
# NOTE(review): leftover debug/sentinel assignment; appears unused here.
x = 'what'
| 228.068493
| 5,731
| 0.910145
| 443
| 16,649
| 33.984199
| 0.376975
| 0.054201
| 0.063235
| 0.065892
| 0.850614
| 0.850614
| 0.850614
| 0.850614
| 0.850614
| 0.850614
| 0
| 0.101218
| 0.058262
| 16,649
| 72
| 5,732
| 231.236111
| 0.858983
| 0.400024
| 0
| 0
| 0
| 0.017544
| 0.829508
| 0.806655
| 0
| 1
| 0
| 0
| 0
| 1
| 0.035088
| false
| 0
| 0.035088
| 0
| 0.105263
| 0.017544
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
29cd522f460b996800fe0d9f2739255f875ef960
| 14,116
|
py
|
Python
|
qatrack/qatrack_core/tests/test_core.py
|
crcrewso/qatrackplus
|
b9da3bc542d9e3eca8b7291bb631d1c7255d528e
|
[
"MIT"
] | 20
|
2021-03-11T18:37:32.000Z
|
2022-03-23T19:38:07.000Z
|
qatrack/qatrack_core/tests/test_core.py
|
crcrewso/qatrackplus
|
b9da3bc542d9e3eca8b7291bb631d1c7255d528e
|
[
"MIT"
] | 75
|
2021-02-12T02:37:33.000Z
|
2022-03-29T20:56:16.000Z
|
qatrack/qatrack_core/tests/test_core.py
|
crcrewso/qatrackplus
|
b9da3bc542d9e3eca8b7291bb631d1c7255d528e
|
[
"MIT"
] | 5
|
2021-04-07T15:46:53.000Z
|
2021-09-18T16:55:00.000Z
|
import datetime
import re
from django.contrib.sites.models import Site
from django.core import mail
from django.test import TestCase
from django.urls import reverse
from django.utils import timezone
import numpy as np
import pandas as pd
import pytz
from qatrack.qa.tests import utils
from qatrack.qatrack_core.serializers import QATrackJSONEncoder
from qatrack.qatrack_core.utils import end_of_day, relative_dates, start_of_day
class TestLoginViews(TestCase):
    """Integration test for the Django auth password-reset flow."""

    def test_password_reset(self):
        """Test full cycle of password reset process"""
        # Blank the configured site domain so the reset link in the
        # outgoing email resolves against the test client's host.
        Site.objects.all().update(domain="")
        u = utils.create_user()
        # Request a reset email for the user's address.
        self.client.post(reverse("password_reset"), {'email': u.email})
        assert "Password reset" in mail.outbox[0].subject
        # Extract the tokenized reset URL from the email body.
        url = re.search(r"(?P<url>https?://[^\s]+)", mail.outbox[0].body).group("url")
        resp = self.client.get(url)
        # The GET redirects (resp.url) to the set-password form; submit
        # the new password there and follow through to the done page.
        resp = self.client.post(
            resp.url, {
                'new_password1': '8P0Cut!v6XUr',
                'new_password2': '8P0Cut!v6XUr',
            }, follow=True
        )
        assert "/accounts/reset/done/" in resp.redirect_chain[0]
class TestJSONEncoder:
    """Checks for the extra types QATrackJSONEncoder.default serializes."""

    def test_np_int(self):
        assert QATrackJSONEncoder().default(np.int8(1)) == 1

    def test_np_array(self):
        assert QATrackJSONEncoder().default(np.array(range(3))) == [0, 1, 2]

    def test_range(self):
        assert QATrackJSONEncoder().default(range(3)) == [0, 1, 2]

    def test_zip(self):
        pairs = zip(range(3), range(3))
        assert QATrackJSONEncoder().default(pairs) == [(0, 0), (1, 1), (2, 2)]

    def test_set(self):
        encoded = QATrackJSONEncoder().default(set(range(3)))
        assert set(encoded) == set(range(3))

    def test_pd_df(self):
        frame = pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]})
        assert QATrackJSONEncoder().default(frame) == {
            'col1': {0: 1, 1: 2},
            'col2': {0: 3, 1: 4},
        }

    def test_datetime(self):
        aware = timezone.datetime(
            2020, 2, 29, 12, 34, 56, tzinfo=pytz.timezone("America/Toronto")
        )
        assert QATrackJSONEncoder().default(aware) == "29 Feb 2020 12:34:56"

    def test_date(self):
        assert QATrackJSONEncoder().default(datetime.date(2020, 2, 29)) == "29 Feb 2020"
class TestRelativeDates:
def setup_class(self):
self.tz = pytz.timezone("America/Toronto")
self.now = timezone.datetime(2020, 1, 2, 11, 38, tzinfo=self.tz)
self.day_start = start_of_day(self.now)
def test_next_7_days(self):
r = relative_dates("next 7 days", self.now)
end = end_of_day(timezone.datetime(2020, 1, 9, tzinfo=self.tz))
assert r.start() == self.day_start
assert r.end() == end
def test_next_30_days(self):
end = end_of_day(timezone.datetime(2020, 2, 1, tzinfo=self.tz))
r = relative_dates("next 30 days", self.now)
assert r.start() == self.day_start
assert r.end() == end
def test_next_365_days(self):
end = end_of_day(timezone.datetime(2021, 1, 1, tzinfo=self.tz))
r = relative_dates("next 365 days", self.now)
assert r.start() == self.day_start
assert r.end() == end
def test_next_week(self):
start = start_of_day(timezone.datetime(2020, 1, 5, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 1, 11, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("next week", self.now)
assert r.start() == start
assert r.end() == end
def test_next_week_sat(self):
pivot = timezone.datetime(2020, 1, 11, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2020, 1, 12, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 1, 18, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("next week", pivot)
assert r.start() == start
assert r.end() == end
def test_next_week_sun(self):
pivot = timezone.datetime(2020, 1, 12, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2020, 1, 19, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 1, 25, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("next week", pivot)
assert r.start() == start
assert r.end() == end
def test_next_month(self):
start = start_of_day(timezone.datetime(2020, 2, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 2, 29, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("next month", self.now)
assert r.start() == start
assert r.end() == end
def test_next_month_first_day(self):
pivot = timezone.datetime(2020, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2020, 2, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 2, 29, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("next month", pivot)
assert r.start() == start
assert r.end() == end
def test_next_month_last_day(self):
pivot = timezone.datetime(2020, 1, 31, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2020, 2, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 2, 29, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("next month", pivot)
assert r.start() == start
assert r.end() == end
def test_next_year(self):
start = start_of_day(timezone.datetime(2021, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2021, 12, 31, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("next year", self.now)
assert r.start() == start
assert r.end() == end
def test_this_week(self):
start = start_of_day(timezone.datetime(2019, 12, 29, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 1, 4, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("this week", self.now)
assert r.start() == start
assert r.end() == end
def test_this_week_sat(self):
pivot = timezone.datetime(2020, 1, 11, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2020, 1, 5, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 1, 11, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("this week", pivot)
assert r.start() == start
assert r.end() == end
def test_this_week_sun(self):
pivot = timezone.datetime(2020, 1, 5, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2020, 1, 5, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 1, 11, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("this week", pivot)
assert r.start() == start
assert r.end() == end
def test_this_year(self):
start = start_of_day(timezone.datetime(2020, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 12, 31, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("this year", self.now)
assert r.start() == start
assert r.end() == end
def test_this_year_jan_1(self):
pivot = timezone.datetime(2020, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2020, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 12, 31, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("this year", pivot)
assert r.start() == start
assert r.end() == end
def test_this_year_dec_31(self):
pivot = timezone.datetime(2020, 12, 31, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2020, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 12, 31, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("this year", pivot)
assert r.start() == start
assert r.end() == end
def test_last_7_days(self):
start = start_of_day(timezone.datetime(2019, 12, 26, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 1, 2, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last 7 days", self.now)
assert r.start() == start
assert r.end() == end
def test_last_30_days(self):
start = start_of_day(timezone.datetime(2019, 12, 3, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 1, 2, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last 30 days", self.now)
assert r.start() == start
assert r.end() == end
def test_last_365_days(self):
start = start_of_day(timezone.datetime(2019, 1, 2, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 1, 2, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last 365 days", self.now)
assert r.start() == start
assert r.end() == end
def test_this_month(self):
start = start_of_day(timezone.datetime(2020, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 1, 31, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("this month", self.now)
assert r.start() == start
assert r.end() == end
def test_last_week(self):
start = start_of_day(timezone.datetime(2019, 12, 22, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2019, 12, 28, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last week", self.now)
assert r.start() == start
assert r.end() == end
def test_last_week_sat(self):
pivot = timezone.datetime(2020, 1, 4, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2019, 12, 22, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2019, 12, 28, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last week", pivot)
assert r.start() == start
assert r.end() == end
def test_last_week_sun(self):
pivot = timezone.datetime(2020, 1, 5, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2019, 12, 29, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2020, 1, 4, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last week", pivot)
assert r.start() == start
assert r.end() == end
def test_last_month(self):
start = start_of_day(timezone.datetime(2019, 12, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2019, 12, 31, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last month", self.now)
assert r.start() == start
assert r.end() == end
def test_last_month_jan1(self):
pivot = timezone.datetime(2020, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2019, 12, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2019, 12, 31, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last month", pivot)
assert r.start() == start
assert r.end() == end
def test_last_month_jan31(self):
pivot = timezone.datetime(2020, 1, 31, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2019, 12, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2019, 12, 31, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last month", pivot)
assert r.start() == start
assert r.end() == end
def test_last_year(self):
start = start_of_day(timezone.datetime(2019, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2019, 12, 31, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last year", self.now)
assert r.start() == start
assert r.end() == end
def test_last_year_jan1(self):
pivot = timezone.datetime(2020, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2019, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2019, 12, 31, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last year", pivot)
assert r.start() == start
assert r.end() == end
def test_last_year_dec31(self):
pivot = timezone.datetime(2020, 12, 31, 11, 38, tzinfo=pytz.timezone("America/Toronto"))
start = start_of_day(timezone.datetime(2019, 1, 1, 11, 38, tzinfo=pytz.timezone("America/Toronto")))
end = end_of_day(timezone.datetime(2019, 12, 31, tzinfo=pytz.timezone("America/Toronto")))
r = relative_dates("last year", pivot)
assert r.start() == start
assert r.end() == end
def test_today(self):
    """'today' spans from the pivot's day start through the same day's end."""
    expected_start = self.day_start
    expected_end = end_of_day(expected_start)
    window = relative_dates("today", self.now)
    assert window.start() == expected_start
    assert window.end() == expected_end
| 45.980456
| 110
| 0.63637
| 1,981
| 14,116
| 4.404341
| 0.074205
| 0.130201
| 0.14808
| 0.202636
| 0.838166
| 0.825215
| 0.815817
| 0.792779
| 0.763782
| 0.755415
| 0
| 0.068645
| 0.21465
| 14,116
| 306
| 111
| 46.130719
| 0.718384
| 0.002905
| 0
| 0.533333
| 0
| 0
| 0.105836
| 0.003199
| 0
| 0
| 0
| 0
| 0.27451
| 1
| 0.156863
| false
| 0.019608
| 0.05098
| 0
| 0.219608
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b188ae0e512d9c128c010ae409e8f80e9a5b8ee
| 6,753
|
py
|
Python
|
preprocess/weixin_prepare.py
|
xuyuandong/sequence_behavior_ctr_model
|
e1bb71b4579456b1c6fbf3b432a84a3cb52611b7
|
[
"MIT"
] | 4
|
2020-01-08T13:39:59.000Z
|
2021-09-21T08:13:44.000Z
|
preprocess/weixin_prepare.py
|
xuyuandong/sequence_behavior_ctr_model
|
e1bb71b4579456b1c6fbf3b432a84a3cb52611b7
|
[
"MIT"
] | null | null | null |
preprocess/weixin_prepare.py
|
xuyuandong/sequence_behavior_ctr_model
|
e1bb71b4579456b1c6fbf3b432a84a3cb52611b7
|
[
"MIT"
] | 3
|
2020-01-09T02:45:14.000Z
|
2021-09-21T08:13:59.000Z
|
import ast
import random
import cPickle as pkl

import numpy as np
# Output handles for the preprocessed train/test sample files and the pickled
# feature file.  NOTE(review): opened in text mode 'w' at import time and never
# explicitly closed in this module -- appears to rely on interpreter exit to
# flush; Feature_handle is not written anywhere in this chunk.
Train_handle = open("./data/weixin_data/weixin_train.txt",'w')
Test_handle = open("./data/weixin_data/weixin_test.txt",'w')
Feature_handle = open("./data/weixin_data/weixin_feature.pkl",'w')
# Target length of a behaviour-history sequence (histories are padded or
# truncated to exactly this many triples in generate_sample_list).
max_len = 50
def _register_hist_items(lines, item_dict):
    """First pass over sample lines: register every (item, vmid, cate) history
    triple as a candidate key in *item_dict* and return the number of lines seen.
    """
    count = 0
    for line in lines:
        units = line.strip().split("\t")
        triples = zip(units[8].split(","), units[9].split(","), units[10].split(","))
        count += 1
        for triple in triples:
            # Keys are the str() repr of the triple, e.g. "('1', '2', '3')".
            item_dict.setdefault(str(triple), 0)
    return count


def _write_with_negatives(lines, out_handle, neg_list, sample_count):
    """Second pass: for each sample line, draw as many negative triples as the
    line's history length from neg_list[sample_count] (skipping triples already
    in the positive history) and append them as three extra tab-separated
    columns.  Returns the updated sample_count cursor into neg_list.

    NOTE(review): if every candidate in a row of neg_list were present in the
    positive history, the while-loop would not terminate; with max_len*2
    candidates drawn from the full item vocabulary this is vanishingly unlikely,
    and the original behaviour is preserved.
    """
    for line in lines:
        units = line.strip().split("\t")
        item_hist_list = units[8].split(",")
        vmid_hist_list = units[9].split(",")
        cate_hist_list = units[10].split(",")
        # Materialize so len() and `in` work on Python 3 as well as Python 2.
        hist_list = list(zip(item_hist_list, vmid_hist_list, cate_hist_list))
        hist_seq = len(hist_list)
        neg_hist_list = []
        while len(neg_hist_list) < hist_seq:
            for item in neg_list[sample_count]:
                # literal_eval parses the "('a', 'b', 'c')" key back into a
                # tuple without the arbitrary-code risk of eval().
                item = ast.literal_eval(item)
                if item not in hist_list:
                    neg_hist_list.append(item)
                    if len(neg_hist_list) == hist_seq:
                        break
        sample_count += 1
        neg_item_list, neg_vmid_list, neg_cate_list = zip(*neg_hist_list)
        out_handle.write(line.strip() + "\t" + ",".join(neg_item_list) + "\t" + ",".join(neg_vmid_list) + "\t" + ",".join(neg_cate_list) + "\n" )
    return sample_count


def produce_neg_item_hist_with_cate(train_file, test_file):
    """Augment every train/test sample with randomly sampled negative
    (item, vmid, cate) histories and write the results to the module-level
    Train_handle / Test_handle.

    train_file / test_file are iterables of tab-separated sample lines as
    produced by generate_sample_list (columns 8-10 hold comma-joined item,
    vmid and cate histories of equal length).
    """
    item_dict = {}
    sample_count = _register_hist_items(train_file, item_dict)
    sample_count += _register_hist_items(test_file, item_dict)
    # Drop the all-zero padding triple so it is never sampled as a negative.
    del(item_dict["('0', '0', '0')"])
    # list() is required on Python 3 (dict views are not array-like) and is a
    # no-op on Python 2.
    neg_array = np.random.choice(np.array(list(item_dict.keys())), (sample_count, max_len*2))
    neg_list = neg_array.tolist()
    cursor = _write_with_negatives(train_file, Train_handle, neg_list, 0)
    _write_with_negatives(test_file, Test_handle, neg_list, cursor)
def _pad_or_trim(values, size):
    """Truncate *values* to its first *size* entries, or right-pad with '0'
    strings up to *size*."""
    if len(values) >= size:
        return values[:size]
    return values + ['0'] * (size - len(values))


def _normalize_sample(line, max_sides, max_tags, max_segs):
    """Normalize one raw tab-separated sample line to fixed-width fields.

    Column 2 (sides), 6 (tags) and 7 (segs) are padded/truncated to their
    respective maxima; columns 8-10 (item/vmid/cate histories) are jointly
    truncated to the most recent max_len triples or padded with ('0','0','0').
    Returns the re-joined tab-separated line.
    """
    units = line.strip().split("\t")
    units[2] = ','.join(_pad_or_trim(units[2].split(","), max_sides))
    if units[6] == '':
        units[6] = '0'
    units[6] = ','.join(_pad_or_trim(units[6].split(","), max_tags))
    if units[7] == '':
        units[7] = '0'
    # BUG FIX: the original truncated segs from tags_list (copy/paste error:
    # `segs_list = tags_list[:max_segs]`), corrupting column 7 whenever a
    # sample had more than max_segs segments.
    units[7] = ','.join(_pad_or_trim(units[7].split(","), max_segs))
    hist_list = list(zip(units[8].split(","), units[9].split(","), units[10].split(",")))
    hist_seq = len(hist_list)
    if hist_seq > max_len:
        # Keep the most recent max_len history entries.
        hist_list = hist_list[-max_len:]
    else:
        hist_list = hist_list + [('0','0','0')] * (max_len - hist_seq)
    item_list, vmid_list, cate_list = zip(*hist_list)
    units[8] = ','.join(item_list)
    units[9] = ','.join(vmid_list)
    units[10] = ','.join(cate_list)
    return '\t'.join(units)


def generate_sample_list():
    """Read the local train/test files, normalize every line to fixed-width
    fields, shuffle the training samples, and return
    (train_sample_list, test_sample_list).
    """
    max_sides = 30
    max_tags = 5
    max_segs = 5
    # open() + with replaces the Python-2-only file() builtin and guarantees
    # the handles are closed.
    with open("./data/weixin_data/local_train.txt") as f:
        train_sample_list = [_normalize_sample(line, max_sides, max_tags, max_segs) for line in f]
    with open("./data/weixin_data/local_test.txt") as f:
        test_sample_list = [_normalize_sample(line, max_sides, max_tags, max_segs) for line in f]
    random.shuffle(train_sample_list)
    return train_sample_list, test_sample_list
# Script entry point: build the padded, shuffled sample lists, then append
# sampled negative histories and write the final train/test files via the
# module-level handles.
if __name__ == "__main__":
    train_sample_list, test_sample_list = generate_sample_list()
    produce_neg_item_hist_with_cate(train_sample_list, test_sample_list)
| 37.726257
| 147
| 0.568192
| 931
| 6,753
| 3.77014
| 0.084855
| 0.159544
| 0.074074
| 0.031909
| 0.886325
| 0.886325
| 0.814245
| 0.796581
| 0.778348
| 0.778348
| 0
| 0.018411
| 0.28417
| 6,753
| 178
| 148
| 37.938202
| 0.707695
| 0.003998
| 0
| 0.807692
| 0
| 0
| 0.043
| 0.02574
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012821
| false
| 0
| 0.019231
| 0
| 0.038462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.