column           dtype    range
repo             string   lengths 8-50
commit           string   lengths 40-40
path             string   lengths 5-171
lang             string   5 distinct values
license          string   13 distinct values
message          string   lengths 21-1.33k
old_code         string   lengths 15-2.4k
new_code         string   lengths 140-2.61k
n_added          int64    values 0-81
n_removed        int64    values 0-58
n_hunks          int64    values 1-8
change_kind      string   3 distinct values
udiff            string   lengths 88-3.33k
udiff-h          string   lengths 85-3.32k
udiff-l          string   lengths 95-3.57k
search-replace   string   lengths 89-3.36k
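Each row that follows is one commit-sized code edit described by the fields above. As a rough, hypothetical sketch of how records with this schema might be consumed (assuming an export as newline-delimited JSON with exactly these field names, which this preview does not specify), the n_added and n_removed counts can be cross-checked against the udiff field:

```python
import json

def added_removed(udiff: str) -> tuple:
    """Count added/removed lines in a unified diff, ignoring the ---/+++ file headers."""
    added = removed = 0
    for line in udiff.splitlines():
        if line.startswith("+") and not line.startswith("+++"):
            added += 1
        elif line.startswith("-") and not line.startswith("---"):
            removed += 1
    return added, removed

# "edits.jsonl" is a hypothetical path; the preview does not say how or where
# the records are actually stored.
with open("edits.jsonl") as fh:
    for raw in fh:
        rec = json.loads(raw)
        assert added_removed(rec["udiff"]) == (rec["n_added"], rec["n_removed"])
```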
repo: mishbahr/staticgen-demo
commit: 2d9fce5715b2d7d5b920d2e77212f076e9ebd1be
path: staticgen_demo/staticgen_views.py
lang: python
license: bsd-3-clause
message: Add CMS Pages to staticgen registry.
# -*- coding: utf-8 -*- from __future__ import unicode_literals from staticgen.staticgen_pool import staticgen_pool from staticgen.staticgen_views import StaticgenView class StaicgenDemoStaticViews(StaticgenView): def items(self): return ( 'sitemap.xml', 'robots.txt', 'page_not_found', 'server_error', ) staticgen_pool.register(StaicgenDemoStaticViews)
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings from django.utils import translation from staticgen.staticgen_pool import staticgen_pool from staticgen.staticgen_views import StaticgenView class StaicgenDemoStaticViews(StaticgenView): def items(self): return ( 'sitemap.xml', 'robots.txt', 'page_not_found', 'server_error', ) staticgen_pool.register(StaicgenDemoStaticViews) class StaticgenCMSView(StaticgenView): def items(self): try: from cms.models import Title except ImportError: # pragma: no cover # django-cms is not installed. return super(StaticgenCMSView, self).items() items = Title.objects.public().filter( page__login_required=False, page__site_id=settings.SITE_ID, ).order_by('page__path') return items def url(self, obj): translation.activate(obj.language) url = obj.page.get_absolute_url(obj.language) translation.deactivate() return url staticgen_pool.register(StaticgenCMSView)
n_added: 27
n_removed: 0
n_hunks: 2
change_kind: add_only
--- a/staticgen_demo/staticgen_views.py +++ b/staticgen_demo/staticgen_views.py @@ -3,2 +3,5 @@ from __future__ import unicode_literals + +from django.conf import settings +from django.utils import translation @@ -19 +22,25 @@ staticgen_pool.register(StaicgenDemoStaticViews) + + +class StaticgenCMSView(StaticgenView): + + def items(self): + try: + from cms.models import Title + except ImportError: # pragma: no cover + # django-cms is not installed. + return super(StaticgenCMSView, self).items() + + items = Title.objects.public().filter( + page__login_required=False, + page__site_id=settings.SITE_ID, + ).order_by('page__path') + return items + + def url(self, obj): + translation.activate(obj.language) + url = obj.page.get_absolute_url(obj.language) + translation.deactivate() + return url + +staticgen_pool.register(StaticgenCMSView)
--- a/staticgen_demo/staticgen_views.py +++ b/staticgen_demo/staticgen_views.py @@ ... @@ from __future__ import unicode_literals + +from django.conf import settings +from django.utils import translation @@ ... @@ staticgen_pool.register(StaicgenDemoStaticViews) + + +class StaticgenCMSView(StaticgenView): + + def items(self): + try: + from cms.models import Title + except ImportError: # pragma: no cover + # django-cms is not installed. + return super(StaticgenCMSView, self).items() + + items = Title.objects.public().filter( + page__login_required=False, + page__site_id=settings.SITE_ID, + ).order_by('page__path') + return items + + def url(self, obj): + translation.activate(obj.language) + url = obj.page.get_absolute_url(obj.language) + translation.deactivate() + return url + +staticgen_pool.register(StaticgenCMSView)
--- a/staticgen_demo/staticgen_views.py +++ b/staticgen_demo/staticgen_views.py @@ -3,2 +3,5 @@ CON from __future__ import unicode_literals ADD ADD from django.conf import settings ADD from django.utils import translation CON @@ -19 +22,25 @@ CON staticgen_pool.register(StaicgenDemoStaticViews) ADD ADD ADD class StaticgenCMSView(StaticgenView): ADD ADD def items(self): ADD try: ADD from cms.models import Title ADD except ImportError: # pragma: no cover ADD # django-cms is not installed. ADD return super(StaticgenCMSView, self).items() ADD ADD items = Title.objects.public().filter( ADD page__login_required=False, ADD page__site_id=settings.SITE_ID, ADD ).order_by('page__path') ADD return items ADD ADD def url(self, obj): ADD translation.activate(obj.language) ADD url = obj.page.get_absolute_url(obj.language) ADD translation.deactivate() ADD return url ADD ADD staticgen_pool.register(StaticgenCMSView)
<<<<<<< SEARCH from __future__ import unicode_literals from staticgen.staticgen_pool import staticgen_pool ======= from __future__ import unicode_literals from django.conf import settings from django.utils import translation from staticgen.staticgen_pool import staticgen_pool >>>>>>> REPLACE <<<<<<< SEARCH staticgen_pool.register(StaicgenDemoStaticViews) ======= staticgen_pool.register(StaicgenDemoStaticViews) class StaticgenCMSView(StaticgenView): def items(self): try: from cms.models import Title except ImportError: # pragma: no cover # django-cms is not installed. return super(StaticgenCMSView, self).items() items = Title.objects.public().filter( page__login_required=False, page__site_id=settings.SITE_ID, ).order_by('page__path') return items def url(self, obj): translation.activate(obj.language) url = obj.page.get_absolute_url(obj.language) translation.deactivate() return url staticgen_pool.register(StaticgenCMSView) >>>>>>> REPLACE
repo: microscope-mobile/ionic-blank
commit: 7ca84f2dd8c83ea75c7065d4562c41b498f7012f
path: tasks/build.js
lang: javascript
license: mit
message: Update path fonts gulp task
var gulp = require('gulp'); var browserify = require('browserify'); var source = require('vinyl-source-stream'); var uglify = require('gulp-uglify'); var buffer = require('vinyl-buffer'); // build src gulp.task('browserify', function(cb){ return browserify('./src/app.js', { debug: true }) .bundle() .pipe(source('bundle.js')) .pipe(gulp.dest('./www/')); cb(); }); // build:release gulp.task('browserify:release', function(cb){ return browserify('./src/app.js') .bundle() .pipe(source('bundle.js')) .pipe(buffer()) .pipe(uglify()) .pipe(gulp.dest('./www/')); cb(); }); gulp.task('fonts', function(cb){ return gulp.src('node_modules/ionic-framework/release/fonts/**') .pipe(gulp.dest('./www/fonts/')); cb(); }); gulp.task('assets', function(cb){ return gulp.src('./assets/**') .pipe(gulp.dest('./www/')); cb(); }); // copy templates gulp.task('templates', function(cb){ return gulp.src('./src/**/*.html') .pipe(gulp.dest('./www/')); cb(); });
var gulp = require('gulp'); var browserify = require('browserify'); var source = require('vinyl-source-stream'); var uglify = require('gulp-uglify'); var buffer = require('vinyl-buffer'); // build src gulp.task('browserify', function(cb){ return browserify('./src/app.js', { debug: true }) .bundle() .pipe(source('bundle.js')) .pipe(gulp.dest('./www/')); cb(); }); // build:release gulp.task('browserify:release', function(cb){ return browserify('./src/app.js') .bundle() .pipe(source('bundle.js')) .pipe(buffer()) .pipe(uglify()) .pipe(gulp.dest('./www/')); cb(); }); gulp.task('fonts', function(cb){ return gulp.src('node_modules/ionic-npm/fonts/**') .pipe(gulp.dest('./www/fonts/')); cb(); }); gulp.task('assets', function(cb){ return gulp.src('./assets/**') .pipe(gulp.dest('./www/')); cb(); }); // copy templates gulp.task('templates', function(cb){ return gulp.src('./src/**/*.html') .pipe(gulp.dest('./www/')); cb(); });
n_added: 1
n_removed: 1
n_hunks: 1
change_kind: mixed
--- a/tasks/build.js +++ b/tasks/build.js @@ -32,3 +32,3 @@ gulp.task('fonts', function(cb){ - return gulp.src('node_modules/ionic-framework/release/fonts/**') + return gulp.src('node_modules/ionic-npm/fonts/**') .pipe(gulp.dest('./www/fonts/'));
--- a/tasks/build.js +++ b/tasks/build.js @@ ... @@ gulp.task('fonts', function(cb){ - return gulp.src('node_modules/ionic-framework/release/fonts/**') + return gulp.src('node_modules/ionic-npm/fonts/**') .pipe(gulp.dest('./www/fonts/'));
--- a/tasks/build.js +++ b/tasks/build.js @@ -32,3 +32,3 @@ CON gulp.task('fonts', function(cb){ DEL return gulp.src('node_modules/ionic-framework/release/fonts/**') ADD return gulp.src('node_modules/ionic-npm/fonts/**') CON .pipe(gulp.dest('./www/fonts/'));
<<<<<<< SEARCH gulp.task('fonts', function(cb){ return gulp.src('node_modules/ionic-framework/release/fonts/**') .pipe(gulp.dest('./www/fonts/')); cb(); ======= gulp.task('fonts', function(cb){ return gulp.src('node_modules/ionic-npm/fonts/**') .pipe(gulp.dest('./www/fonts/')); cb(); >>>>>>> REPLACE <<<<<<< SEARCH .pipe(gulp.dest('./www/')); cb(); }); ======= .pipe(gulp.dest('./www/')); cb(); }); >>>>>>> REPLACE
repo: dev-brutus/427
commit: cd6738fd9d90b6ab14ac89582f65d174f6f89bd6
path: main.js
lang: javascript
license: bsd-3-clause
message: Change symbols generation and checks
var LOCK = false; function print(text, nextAction) { if (text) { LOCK = true; var display = document.getElementById("display"); display.innerHTML = display.innerHTML + text.substr(0, 1); display.scrollTop = display.scrollHeight; var newText = text.substring(1, text.length); window.setTimeout(function () { print(newText, nextAction); }, 50); } else { LOCK = false; if (nextAction) { nextAction(); } } } function generateTask() { var randomChar = String.fromCharCode(Math.floor(Math.random() * 27) + 65); window.setTimeout(function () { var message = "\nPlease, press key '" + randomChar + "'\n" print(message, function () { document.onkeydown = function (e) { e = e || event; var ch = String.fromCharCode(e.keyCode); if (ch >= 'A' && ch <= 'Z') { if (!LOCK) { if (ch == randomChar) { document.onkeydown = undefined; print("OK... \n", generateTask); } else { print("Wrong!!! \n" + message); } } return false; } return true; } }); }, Math.floor(Math.random() * 10000)); }
var LOCK = false; function print(text, nextAction) { if (text) { LOCK = true; var display = document.getElementById("display"); display.innerHTML = display.innerHTML + text.substr(0, 1); display.scrollTop = display.scrollHeight; var newText = text.substring(1, text.length); window.setTimeout(function () { print(newText, nextAction); }, 50); } else { LOCK = false; if (nextAction) { nextAction(); } } } function generateTask() { var randomChar = String.fromCharCode(Math.floor(Math.random() * 26) + 65); window.setTimeout(function () { var message = "\nPlease, press key '" + randomChar + "'\n" print(message, function () { document.onkeydown = function (e) { var keyCode = (e || event).keyCode; var ch = String.fromCharCode(keyCode); if (keyCode >= 0 && keyCode <= 127) { if (!LOCK) { if (ch == randomChar) { document.onkeydown = undefined; print("OK... \n", generateTask); } else { print("Wrong!!! \n" + message); } } return false; } return true; } }); }, Math.floor(Math.random() * 10000)); }
n_added: 4
n_removed: 4
n_hunks: 2
change_kind: mixed
--- a/main.js +++ b/main.js @@ -21,3 +21,3 @@ function generateTask() { - var randomChar = String.fromCharCode(Math.floor(Math.random() * 27) + 65); + var randomChar = String.fromCharCode(Math.floor(Math.random() * 26) + 65); @@ -27,5 +27,5 @@ document.onkeydown = function (e) { - e = e || event; - var ch = String.fromCharCode(e.keyCode); - if (ch >= 'A' && ch <= 'Z') { + var keyCode = (e || event).keyCode; + var ch = String.fromCharCode(keyCode); + if (keyCode >= 0 && keyCode <= 127) { if (!LOCK) {
--- a/main.js +++ b/main.js @@ ... @@ function generateTask() { - var randomChar = String.fromCharCode(Math.floor(Math.random() * 27) + 65); + var randomChar = String.fromCharCode(Math.floor(Math.random() * 26) + 65); @@ ... @@ document.onkeydown = function (e) { - e = e || event; - var ch = String.fromCharCode(e.keyCode); - if (ch >= 'A' && ch <= 'Z') { + var keyCode = (e || event).keyCode; + var ch = String.fromCharCode(keyCode); + if (keyCode >= 0 && keyCode <= 127) { if (!LOCK) {
--- a/main.js +++ b/main.js @@ -21,3 +21,3 @@ CON function generateTask() { DEL var randomChar = String.fromCharCode(Math.floor(Math.random() * 27) + 65); ADD var randomChar = String.fromCharCode(Math.floor(Math.random() * 26) + 65); CON @@ -27,5 +27,5 @@ CON document.onkeydown = function (e) { DEL e = e || event; DEL var ch = String.fromCharCode(e.keyCode); DEL if (ch >= 'A' && ch <= 'Z') { ADD var keyCode = (e || event).keyCode; ADD var ch = String.fromCharCode(keyCode); ADD if (keyCode >= 0 && keyCode <= 127) { CON if (!LOCK) {
<<<<<<< SEARCH function generateTask() { var randomChar = String.fromCharCode(Math.floor(Math.random() * 27) + 65); window.setTimeout(function () { var message = "\nPlease, press key '" + randomChar + "'\n" print(message, function () { document.onkeydown = function (e) { e = e || event; var ch = String.fromCharCode(e.keyCode); if (ch >= 'A' && ch <= 'Z') { if (!LOCK) { if (ch == randomChar) { ======= function generateTask() { var randomChar = String.fromCharCode(Math.floor(Math.random() * 26) + 65); window.setTimeout(function () { var message = "\nPlease, press key '" + randomChar + "'\n" print(message, function () { document.onkeydown = function (e) { var keyCode = (e || event).keyCode; var ch = String.fromCharCode(keyCode); if (keyCode >= 0 && keyCode <= 127) { if (!LOCK) { if (ch == randomChar) { >>>>>>> REPLACE <<<<<<< SEARCH }); }, Math.floor(Math.random() * 10000)); } ======= }); }, Math.floor(Math.random() * 10000)); } >>>>>>> REPLACE
repo: alanc10n/py-cutplanner
commit: 90f7bcb7ab6a43e0d116d6c9e71cc94977c6479c
path: cutplanner/planner.py
lang: python
license: mit
message: Add function for next_fit algorithm
import collections from stock import Stock # simple structure to keep track of a specific piece Piece = collections.namedtuple('Piece', 'id, length') class Planner(object): def __init__(self, sizes, needed, loss=0.25): self.stock = [] self.stock_sizes = sorted(sizes) self.pieces_needed = needed.reverse self.cut_loss = loss self.cur_stock = None @property def largest_stock(self): return self.stock_sizes[-1] def cut_piece(self, piece): """ Record the cut for the given piece """ self.cur_stock.cut(piece, self.cut_loss) def finalize_stock(self): """ Takes current stock out of use, attempts to shrink """ # shrink as much as possible for smaller in self.stock_sizes[-2::-1]: if self.cur_stock.shrink(smaller) is None: break self.stock.append(self.cur_stock)
import collections from stock import Stock # simple structure to keep track of a specific piece Piece = collections.namedtuple('Piece', 'id, length') class Planner(object): def __init__(self, sizes, needed, loss=0.25): self.stock = [] self.stock_sizes = sorted(sizes) self.pieces_needed = needed.reverse self.cut_loss = loss self.cur_stock = None @property def largest_stock(self): return self.stock_sizes[-1] def cut_piece(self, piece): """ Record the cut for the given piece """ self.cur_stock.cut(piece, self.cut_loss) def finalize_stock(self): """ Takes current stock out of use, attempts to shrink """ # shrink as much as possible for smaller in self.stock_sizes[-2::-1]: if self.cur_stock.shrink(smaller) is None: break self.stock.append(self.cur_stock) def apply_next_fit(self, piece): """ Cut from current stock until unable, then move to new stock """ if self.cur_stock.remaining_length < piece.length + self.cut_loss: # finalize current stock and get fresh stock self.finalize_stock() cur_stock = Stock(self.largest_stock) self.cur_stock.cut(piece, self.cut_loss)
n_added: 8
n_removed: 0
n_hunks: 1
change_kind: add_only
--- a/cutplanner/planner.py +++ b/cutplanner/planner.py @@ -32 +32,9 @@ + def apply_next_fit(self, piece): + """ Cut from current stock until unable, then move to new stock """ + if self.cur_stock.remaining_length < piece.length + self.cut_loss: + # finalize current stock and get fresh stock + self.finalize_stock() + cur_stock = Stock(self.largest_stock) + + self.cur_stock.cut(piece, self.cut_loss)
--- a/cutplanner/planner.py +++ b/cutplanner/planner.py @@ ... @@ + def apply_next_fit(self, piece): + """ Cut from current stock until unable, then move to new stock """ + if self.cur_stock.remaining_length < piece.length + self.cut_loss: + # finalize current stock and get fresh stock + self.finalize_stock() + cur_stock = Stock(self.largest_stock) + + self.cur_stock.cut(piece, self.cut_loss)
--- a/cutplanner/planner.py +++ b/cutplanner/planner.py @@ -32 +32,9 @@ CON ADD def apply_next_fit(self, piece): ADD """ Cut from current stock until unable, then move to new stock """ ADD if self.cur_stock.remaining_length < piece.length + self.cut_loss: ADD # finalize current stock and get fresh stock ADD self.finalize_stock() ADD cur_stock = Stock(self.largest_stock) ADD ADD self.cur_stock.cut(piece, self.cut_loss)
<<<<<<< SEARCH self.stock.append(self.cur_stock) ======= self.stock.append(self.cur_stock) def apply_next_fit(self, piece): """ Cut from current stock until unable, then move to new stock """ if self.cur_stock.remaining_length < piece.length + self.cut_loss: # finalize current stock and get fresh stock self.finalize_stock() cur_stock = Stock(self.largest_stock) self.cur_stock.cut(piece, self.cut_loss) >>>>>>> REPLACE
repo: gyn/exercism
commit: 3937ee938b3de9e5d20bdcf21f79f44563f57b47
path: rust/luhn/src/lib.rs
lang: rust
license: bsd-2-clause
message: Change luhn code to simplify I still not decide to use the functional style for this case
pub fn is_valid(id: &str) -> bool { // check the length if id.len() < 2 { return false; } let mut index: u32 = 0; let mut number: Vec<u32> = Vec::new(); for c in id.chars().rev() { if c.is_whitespace() { continue; } let mut v: u32; match c.to_digit(10) { Some(d) => v = d, None => return false, } if index % 2 != 0 { v *= 2; if v > 9 { v -= 9; } } index += 1; number.push(v); } if number.len() == 1 { return false; } let r: u32 = number.iter().sum(); (r % 10) == 0 }
pub fn is_valid(id: &str) -> bool { // check the length if id.len() < 2 { return false; } let mut index: u32 = 0; let mut sum: u32 = 0; for c in id.chars().rev() { if c.is_whitespace() { continue; } let mut v: u32; match c.to_digit(10) { Some(d) => v = d, None => return false, } if index % 2 != 0 { v *= 2; if v > 9 { v -= 9; } } index += 1; sum += v; } if index < 2 { return false; } (sum % 10) == 0 }
n_added: 4
n_removed: 7
n_hunks: 3
change_kind: mixed
--- a/rust/luhn/src/lib.rs +++ b/rust/luhn/src/lib.rs @@ -7,3 +7,3 @@ let mut index: u32 = 0; - let mut number: Vec<u32> = Vec::new(); + let mut sum: u32 = 0; @@ -31,7 +31,6 @@ index += 1; - - number.push(v); + sum += v; } - if number.len() == 1 { + if index < 2 { return false; @@ -39,5 +38,3 @@ - let r: u32 = number.iter().sum(); - - (r % 10) == 0 + (sum % 10) == 0 }
--- a/rust/luhn/src/lib.rs +++ b/rust/luhn/src/lib.rs @@ ... @@ let mut index: u32 = 0; - let mut number: Vec<u32> = Vec::new(); + let mut sum: u32 = 0; @@ ... @@ index += 1; - - number.push(v); + sum += v; } - if number.len() == 1 { + if index < 2 { return false; @@ ... @@ - let r: u32 = number.iter().sum(); - - (r % 10) == 0 + (sum % 10) == 0 }
--- a/rust/luhn/src/lib.rs +++ b/rust/luhn/src/lib.rs @@ -7,3 +7,3 @@ CON let mut index: u32 = 0; DEL let mut number: Vec<u32> = Vec::new(); ADD let mut sum: u32 = 0; CON @@ -31,7 +31,6 @@ CON index += 1; DEL DEL number.push(v); ADD sum += v; CON } CON DEL if number.len() == 1 { ADD if index < 2 { CON return false; @@ -39,5 +38,3 @@ CON DEL let r: u32 = number.iter().sum(); DEL DEL (r % 10) == 0 ADD (sum % 10) == 0 CON }
<<<<<<< SEARCH let mut index: u32 = 0; let mut number: Vec<u32> = Vec::new(); for c in id.chars().rev() { ======= let mut index: u32 = 0; let mut sum: u32 = 0; for c in id.chars().rev() { >>>>>>> REPLACE <<<<<<< SEARCH index += 1; number.push(v); } if number.len() == 1 { return false; } let r: u32 = number.iter().sum(); (r % 10) == 0 } ======= index += 1; sum += v; } if index < 2 { return false; } (sum % 10) == 0 } >>>>>>> REPLACE
repo: kave/cfgov-refresh
commit: 8ca7b3bfcb49ed884de61ae2211e400abbe6f758
path: test/browser_tests/jenkins_conf.js
lang: javascript
license: cc0-1.0
message: Add sauce tunnel config for jenkins
'use strict'; exports.config = { framework: 'jasmine2', specs: [ 'spec_suites/shared/*.js' ], capabilities: { browserName: 'chrome', name: 'flapjack-browser-tests' }, sauceUser: process.env.SAUCE_USER, sauceKey: process.env.SAUCE_KEY, onPrepare: function() { browser.ignoreSynchronization = true; var JasmineReporters = require( 'jasmine-reporters' ); var mkdirp = require( 'mkdirp' ); var JasmineSpecReporter = require( 'jasmine-spec-reporter' ); // add jasmine spec reporter jasmine.getEnv().addReporter( new JasmineSpecReporter( { displayStacktrace: true } ) ); var newFolder = 'reports/'; mkdirp( newFolder, function( err ) { if ( err ) { console.error( err ); } else { var jUnitXmlReporter = new JasmineReporters.JUnitXmlReporter( { consolidateAll: true, savePath: newFolder, filePrefix: 'test-results' } ); jasmine.getEnv().addReporter( jUnitXmlReporter ); } } ); } };
'use strict'; exports.config = { framework: 'jasmine2', specs: [ 'spec_suites/shared/*.js' ], capabilities: { 'browserName': 'chrome', 'name': 'flapjack-browser-tests ' + process.env.SITE_DESC, 'tunnel-identifier': process.env.SAUCE_TUNNEL }, sauceUser: process.env.SAUCE_USER, sauceKey: process.env.SAUCE_KEY, onPrepare: function() { browser.ignoreSynchronization = true; var JasmineReporters = require( 'jasmine-reporters' ); var mkdirp = require( 'mkdirp' ); var JasmineSpecReporter = require( 'jasmine-spec-reporter' ); // add jasmine spec reporter jasmine.getEnv().addReporter( new JasmineSpecReporter( { displayStacktrace: true } ) ); var newFolder = 'reports/'; mkdirp( newFolder, function( err ) { if ( err ) { console.error( err ); } else { var jUnitXmlReporter = new JasmineReporters.JUnitXmlReporter( { consolidateAll: true, savePath: newFolder, filePrefix: 'test-results' } ); jasmine.getEnv().addReporter( jUnitXmlReporter ); } } ); } };
n_added: 3
n_removed: 2
n_hunks: 1
change_kind: mixed
--- a/test/browser_tests/jenkins_conf.js +++ b/test/browser_tests/jenkins_conf.js @@ -6,4 +6,5 @@ capabilities: { - browserName: 'chrome', - name: 'flapjack-browser-tests' + 'browserName': 'chrome', + 'name': 'flapjack-browser-tests ' + process.env.SITE_DESC, + 'tunnel-identifier': process.env.SAUCE_TUNNEL },
--- a/test/browser_tests/jenkins_conf.js +++ b/test/browser_tests/jenkins_conf.js @@ ... @@ capabilities: { - browserName: 'chrome', - name: 'flapjack-browser-tests' + 'browserName': 'chrome', + 'name': 'flapjack-browser-tests ' + process.env.SITE_DESC, + 'tunnel-identifier': process.env.SAUCE_TUNNEL },
--- a/test/browser_tests/jenkins_conf.js +++ b/test/browser_tests/jenkins_conf.js @@ -6,4 +6,5 @@ CON capabilities: { DEL browserName: 'chrome', DEL name: 'flapjack-browser-tests' ADD 'browserName': 'chrome', ADD 'name': 'flapjack-browser-tests ' + process.env.SITE_DESC, ADD 'tunnel-identifier': process.env.SAUCE_TUNNEL CON },
<<<<<<< SEARCH specs: [ 'spec_suites/shared/*.js' ], capabilities: { browserName: 'chrome', name: 'flapjack-browser-tests' }, ======= specs: [ 'spec_suites/shared/*.js' ], capabilities: { 'browserName': 'chrome', 'name': 'flapjack-browser-tests ' + process.env.SITE_DESC, 'tunnel-identifier': process.env.SAUCE_TUNNEL }, >>>>>>> REPLACE
repo: jhelwig/homers
commit: 265f057f25bc50ea8cc926a924336d3ca27ade6c
path: src/main.rs
lang: rust
license: mit
message: Return an error result, instead of pretending unimplemented commands are ok
#[macro_use] extern crate clap; #[macro_use] extern crate log; extern crate git2; mod cd; mod cli; mod logger; mod repository; mod settings; use std::process::exit; use settings::Settings; fn main() { match run_cli() { Ok(_) => {} Err((message, exit_code)) => { error!("{}", message); exit(exit_code); } } } fn run_cli() -> Result<(), (String, i32)> { let matches = cli::app().get_matches(); let settings = Settings::from_matches(&matches); logger::init(&settings).expect("Failed to initialize logger."); match matches.subcommand() { ("cd", Some(m)) => try!(cd::open_shell_at_repo(&settings, m)), ("clone", Some(m)) => try!(repository::clone(&settings, m)), ("link", Some(m)) => try!(repository::link_repo(&settings, m)), (name, _) => println!("'{}' is not implemented yet.", name), }; Ok(()) }
#[macro_use] extern crate clap; #[macro_use] extern crate log; extern crate git2; mod cd; mod cli; mod logger; mod repository; mod settings; use std::process::exit; use settings::Settings; fn main() { match run_cli() { Ok(_) => {} Err((message, exit_code)) => { error!("{}", message); exit(exit_code); } } } fn run_cli() -> Result<(), (String, i32)> { let matches = cli::app().get_matches(); let settings = Settings::from_matches(&matches); logger::init(&settings).expect("Failed to initialize logger."); match matches.subcommand() { ("cd", Some(m)) => try!(cd::open_shell_at_repo(&settings, m)), ("clone", Some(m)) => try!(repository::clone(&settings, m)), ("link", Some(m)) => try!(repository::link_repo(&settings, m)), (name, _) => { return Err((format!("'{}' is not implemented yet.", name), 1)); } }; Ok(()) }
n_added: 3
n_removed: 1
n_hunks: 1
change_kind: mixed
--- a/src/main.rs +++ b/src/main.rs @@ -36,3 +36,5 @@ ("link", Some(m)) => try!(repository::link_repo(&settings, m)), - (name, _) => println!("'{}' is not implemented yet.", name), + (name, _) => { + return Err((format!("'{}' is not implemented yet.", name), 1)); + } };
--- a/src/main.rs +++ b/src/main.rs @@ ... @@ ("link", Some(m)) => try!(repository::link_repo(&settings, m)), - (name, _) => println!("'{}' is not implemented yet.", name), + (name, _) => { + return Err((format!("'{}' is not implemented yet.", name), 1)); + } };
--- a/src/main.rs +++ b/src/main.rs @@ -36,3 +36,5 @@ CON ("link", Some(m)) => try!(repository::link_repo(&settings, m)), DEL (name, _) => println!("'{}' is not implemented yet.", name), ADD (name, _) => { ADD return Err((format!("'{}' is not implemented yet.", name), 1)); ADD } CON };
<<<<<<< SEARCH ("clone", Some(m)) => try!(repository::clone(&settings, m)), ("link", Some(m)) => try!(repository::link_repo(&settings, m)), (name, _) => println!("'{}' is not implemented yet.", name), }; ======= ("clone", Some(m)) => try!(repository::clone(&settings, m)), ("link", Some(m)) => try!(repository::link_repo(&settings, m)), (name, _) => { return Err((format!("'{}' is not implemented yet.", name), 1)); } }; >>>>>>> REPLACE
repo: napalm-automation/napalm-logs
commit: 375b26fbb6e5ba043a1017e28027241c12374207
path: napalm_logs/transport/zeromq.py
lang: python
license: apache-2.0
message: Raise bind exception and log
# -*- coding: utf-8 -*- ''' ZeroMQ transport for napalm-logs. ''' from __future__ import absolute_import from __future__ import unicode_literals # Import stdlib import json # Import third party libs import zmq # Import napalm-logs pkgs from napalm_logs.transport.base import TransportBase class ZMQTransport(TransportBase): ''' ZMQ transport class. ''' def __init__(self, addr, port): self.addr = addr self.port = port def start(self): self.context = zmq.Context() self.socket = self.context.socket(zmq.PUB) self.socket.bind('tcp://{addr}:{port}'.format( addr=self.addr, port=self.port) ) def serialise(self, obj): return json.dumps(obj) def publish(self, obj): self.socket.send( self.serialise(obj) ) def tear_down(self): if hasattr(self, 'socket'): self.socket.close() if hasattr(self, 'context'): self.context.term()
# -*- coding: utf-8 -*- ''' ZeroMQ transport for napalm-logs. ''' from __future__ import absolute_import from __future__ import unicode_literals # Import stdlib import json import logging # Import third party libs import zmq # Import napalm-logs pkgs from napalm_logs.exceptions import BindException from napalm_logs.transport.base import TransportBase log = logging.getLogger(__name__) class ZMQTransport(TransportBase): ''' ZMQ transport class. ''' def __init__(self, addr, port): self.addr = addr self.port = port def start(self): self.context = zmq.Context() self.socket = self.context.socket(zmq.PUB) try: self.socket.bind('tcp://{addr}:{port}'.format( addr=self.addr, port=self.port) ) except zmq.error.ZMQError as err: log.error(err, exc_info=True) raise BindException(err) def serialise(self, obj): return json.dumps(obj) def publish(self, obj): self.socket.send( self.serialise(obj) ) def tear_down(self): if hasattr(self, 'socket'): self.socket.close() if hasattr(self, 'context'): self.context.term()
n_added: 12
n_removed: 4
n_hunks: 3
change_kind: mixed
--- a/napalm_logs/transport/zeromq.py +++ b/napalm_logs/transport/zeromq.py @@ -9,2 +9,3 @@ import json +import logging @@ -14,3 +15,6 @@ # Import napalm-logs pkgs +from napalm_logs.exceptions import BindException from napalm_logs.transport.base import TransportBase + +log = logging.getLogger(__name__) @@ -28,6 +32,10 @@ self.socket = self.context.socket(zmq.PUB) - self.socket.bind('tcp://{addr}:{port}'.format( - addr=self.addr, - port=self.port) - ) + try: + self.socket.bind('tcp://{addr}:{port}'.format( + addr=self.addr, + port=self.port) + ) + except zmq.error.ZMQError as err: + log.error(err, exc_info=True) + raise BindException(err)
--- a/napalm_logs/transport/zeromq.py +++ b/napalm_logs/transport/zeromq.py @@ ... @@ import json +import logging @@ ... @@ # Import napalm-logs pkgs +from napalm_logs.exceptions import BindException from napalm_logs.transport.base import TransportBase + +log = logging.getLogger(__name__) @@ ... @@ self.socket = self.context.socket(zmq.PUB) - self.socket.bind('tcp://{addr}:{port}'.format( - addr=self.addr, - port=self.port) - ) + try: + self.socket.bind('tcp://{addr}:{port}'.format( + addr=self.addr, + port=self.port) + ) + except zmq.error.ZMQError as err: + log.error(err, exc_info=True) + raise BindException(err)
--- a/napalm_logs/transport/zeromq.py +++ b/napalm_logs/transport/zeromq.py @@ -9,2 +9,3 @@ CON import json ADD import logging CON @@ -14,3 +15,6 @@ CON # Import napalm-logs pkgs ADD from napalm_logs.exceptions import BindException CON from napalm_logs.transport.base import TransportBase ADD ADD log = logging.getLogger(__name__) CON @@ -28,6 +32,10 @@ CON self.socket = self.context.socket(zmq.PUB) DEL self.socket.bind('tcp://{addr}:{port}'.format( DEL addr=self.addr, DEL port=self.port) DEL ) ADD try: ADD self.socket.bind('tcp://{addr}:{port}'.format( ADD addr=self.addr, ADD port=self.port) ADD ) ADD except zmq.error.ZMQError as err: ADD log.error(err, exc_info=True) ADD raise BindException(err) CON
<<<<<<< SEARCH # Import stdlib import json # Import third party libs import zmq # Import napalm-logs pkgs from napalm_logs.transport.base import TransportBase ======= # Import stdlib import json import logging # Import third party libs import zmq # Import napalm-logs pkgs from napalm_logs.exceptions import BindException from napalm_logs.transport.base import TransportBase log = logging.getLogger(__name__) >>>>>>> REPLACE <<<<<<< SEARCH self.context = zmq.Context() self.socket = self.context.socket(zmq.PUB) self.socket.bind('tcp://{addr}:{port}'.format( addr=self.addr, port=self.port) ) def serialise(self, obj): ======= self.context = zmq.Context() self.socket = self.context.socket(zmq.PUB) try: self.socket.bind('tcp://{addr}:{port}'.format( addr=self.addr, port=self.port) ) except zmq.error.ZMQError as err: log.error(err, exc_info=True) raise BindException(err) def serialise(self, obj): >>>>>>> REPLACE
repo: koa/hector
commit: 43fe84036738f6fe0743651b12ec14b44c6f55cd
path: src/main/java/me/prettyprint/cassandra/service/ExampleClient.java
lang: java
license: mit
message: Add a load balanced client example
package me.prettyprint.cassandra.service; import static me.prettyprint.cassandra.utils.StringUtils.bytes; import static me.prettyprint.cassandra.utils.StringUtils.string; import org.apache.cassandra.service.Column; import org.apache.cassandra.service.ColumnPath; /** * Example client that uses the cassandra hector client. * * @author Ran Tavory ([email protected]) * */ public class ExampleClient { public static void main(String[] args) throws IllegalStateException, PoolExhaustedException, Exception { CassandraClientPool pool = CassandraClientPoolFactory.INSTANCE.get(); CassandraClient client = pool.borrowClient("tush", 9160); try { Keyspace keyspace = client.getKeyspace("Keyspace1"); ColumnPath columnPath = new ColumnPath("Standard1", null, bytes("column-name")); // insert keyspace.insert("key", columnPath, bytes("value")); // read Column col = keyspace.getColumn("key", columnPath); System.out.println("Read from cassandra: " + string(col.getValue())); } finally { // return client to pool. do it in a finally block to make sure it's executed pool.releaseClient(client); } } }
package me.prettyprint.cassandra.service; import static me.prettyprint.cassandra.utils.StringUtils.bytes; import static me.prettyprint.cassandra.utils.StringUtils.string; import org.apache.cassandra.service.Column; import org.apache.cassandra.service.ColumnPath; /** * Example client that uses the cassandra hector client. * * @author Ran Tavory ([email protected]) * */ public class ExampleClient { public static void main(String[] args) throws IllegalStateException, PoolExhaustedException, Exception { CassandraClientPool pool = CassandraClientPoolFactory.INSTANCE.get(); CassandraClient client = pool.borrowClient("tush", 9160); // A load balanced version would look like this: // CassandraClient client = pool.borrowClient(new String[] {"cas1:9160", "cas2:9160", "cas3:9160"}); try { Keyspace keyspace = client.getKeyspace("Keyspace1"); ColumnPath columnPath = new ColumnPath("Standard1", null, bytes("column-name")); // insert keyspace.insert("key", columnPath, bytes("value")); // read Column col = keyspace.getColumn("key", columnPath); System.out.println("Read from cassandra: " + string(col.getValue())); } finally { // return client to pool. do it in a finally block to make sure it's executed pool.releaseClient(client); } } }
n_added: 3
n_removed: 0
n_hunks: 1
change_kind: add_only
--- a/src/main/java/me/prettyprint/cassandra/service/ExampleClient.java +++ b/src/main/java/me/prettyprint/cassandra/service/ExampleClient.java @@ -19,2 +19,5 @@ CassandraClient client = pool.borrowClient("tush", 9160); + // A load balanced version would look like this: + // CassandraClient client = pool.borrowClient(new String[] {"cas1:9160", "cas2:9160", "cas3:9160"}); + try {
--- a/src/main/java/me/prettyprint/cassandra/service/ExampleClient.java +++ b/src/main/java/me/prettyprint/cassandra/service/ExampleClient.java @@ ... @@ CassandraClient client = pool.borrowClient("tush", 9160); + // A load balanced version would look like this: + // CassandraClient client = pool.borrowClient(new String[] {"cas1:9160", "cas2:9160", "cas3:9160"}); + try {
--- a/src/main/java/me/prettyprint/cassandra/service/ExampleClient.java +++ b/src/main/java/me/prettyprint/cassandra/service/ExampleClient.java @@ -19,2 +19,5 @@ CON CassandraClient client = pool.borrowClient("tush", 9160); ADD // A load balanced version would look like this: ADD // CassandraClient client = pool.borrowClient(new String[] {"cas1:9160", "cas2:9160", "cas3:9160"}); ADD CON try {
<<<<<<< SEARCH CassandraClientPool pool = CassandraClientPoolFactory.INSTANCE.get(); CassandraClient client = pool.borrowClient("tush", 9160); try { Keyspace keyspace = client.getKeyspace("Keyspace1"); ======= CassandraClientPool pool = CassandraClientPoolFactory.INSTANCE.get(); CassandraClient client = pool.borrowClient("tush", 9160); // A load balanced version would look like this: // CassandraClient client = pool.borrowClient(new String[] {"cas1:9160", "cas2:9160", "cas3:9160"}); try { Keyspace keyspace = client.getKeyspace("Keyspace1"); >>>>>>> REPLACE
repo: serverboards/serverboards
commit: af90a935ae86507f86df4ff1b793a96ed231f606
path: frontend/webpack.config-test.babel.js
lang: javascript
license: apache-2.0
message: Define __DEV__ for frontend tests
import nodeExternals from 'webpack-node-externals'; var path = require('path') export default { target: 'node', externals: [nodeExternals()], module: { rules: [ //{ test: /\.jsx$/, loaders: ['react-hot', 'babel'], exclude: /node_modules/ }, { test: /\.js$/, exclude: /node_modules/, use: ["react-hot-loader", "babel-loader"] }, { test: /\.css$/, use: ["style-loader","css"] }, { test: /\.sass$/, use: [{ loader: "style-loader" }, { loader: "css-loader" }, { loader: "sass-loader", options : { includePaths: ["./"] } }] }, { test: /\.(jpe?g|png|gif|svg)$/i, loaders: [ 'file-loader', { loader: 'image-webpack-loader', query: { mozjpeg: { progressive: true, }, gifslice: { interlaced: false, }, optipng: { optimizationLevel: 7, }, pngquant: { quality: '65-90', speed: 4 } } } ] } ] }, devtool: "cheap-module-source-map", resolve: { alias:{ app : path.resolve("./app/js"), sass : path.resolve("./app/sass"), lang : path.resolve("./lang") } }, };
import webpack from 'webpack' import nodeExternals from 'webpack-node-externals'; var path = require('path') export default { target: 'node', externals: [nodeExternals()], module: { rules: [ //{ test: /\.jsx$/, loaders: ['react-hot', 'babel'], exclude: /node_modules/ }, { test: /\.js$/, exclude: /node_modules/, use: ["react-hot-loader", "babel-loader"] }, { test: /\.css$/, use: ["style-loader","css"] }, { test: /\.sass$/, use: [{ loader: "style-loader" }, { loader: "css-loader" }, { loader: "sass-loader", options : { includePaths: ["./"] } }] }, { test: /\.(jpe?g|png|gif|svg)$/i, loaders: [ 'file-loader', { loader: 'image-webpack-loader', query: { mozjpeg: { progressive: true, }, gifslice: { interlaced: false, }, optipng: { optimizationLevel: 7, }, pngquant: { quality: '65-90', speed: 4 } } } ] } ] }, devtool: "cheap-module-source-map", resolve: { alias:{ app : path.resolve("./app/js"), sass : path.resolve("./app/sass"), lang : path.resolve("./lang") } }, plugins: [ new webpack.DefinePlugin({ __DEV__: JSON.stringify(false), }) ] };
n_added: 6
n_removed: 0
n_hunks: 2
change_kind: add_only
--- a/frontend/webpack.config-test.babel.js +++ b/frontend/webpack.config-test.babel.js @@ -1 +1,2 @@ +import webpack from 'webpack' import nodeExternals from 'webpack-node-externals'; @@ -61,2 +62,7 @@ }, + plugins: [ + new webpack.DefinePlugin({ + __DEV__: JSON.stringify(false), + }) + ] };
--- a/frontend/webpack.config-test.babel.js +++ b/frontend/webpack.config-test.babel.js @@ ... @@ +import webpack from 'webpack' import nodeExternals from 'webpack-node-externals'; @@ ... @@ }, + plugins: [ + new webpack.DefinePlugin({ + __DEV__: JSON.stringify(false), + }) + ] };
--- a/frontend/webpack.config-test.babel.js +++ b/frontend/webpack.config-test.babel.js @@ -1 +1,2 @@ ADD import webpack from 'webpack' CON import nodeExternals from 'webpack-node-externals'; @@ -61,2 +62,7 @@ CON }, ADD plugins: [ ADD new webpack.DefinePlugin({ ADD __DEV__: JSON.stringify(false), ADD }) ADD ] CON };
<<<<<<< SEARCH import nodeExternals from 'webpack-node-externals'; var path = require('path') ======= import webpack from 'webpack' import nodeExternals from 'webpack-node-externals'; var path = require('path') >>>>>>> REPLACE <<<<<<< SEARCH } }, }; ======= } }, plugins: [ new webpack.DefinePlugin({ __DEV__: JSON.stringify(false), }) ] }; >>>>>>> REPLACE
repo: Kusand/TaKotlin
commit: 25b40a2998677f001e634e1cf86193e7d1d4181e
path: app/src/main/java/net/ericschrag/takotlin/view/RecipeView.kt
lang: kotlin
license: mit
message: Make random loading indicator respect multiple clicks
package net.ericschrag.takotlin.view import android.app.Activity import android.view.View import android.widget.TextView import fr.castorflex.android.smoothprogressbar.SmoothProgressBar import net.ericschrag.takotlin.R import org.jetbrains.anko.* import org.jetbrains.anko.appcompat.v7.toolbar class RecipeView : AnkoComponent<Activity> { var loadingIndicator: SmoothProgressBar? = null var recipeTitle: TextView? = null override fun createView(ui: AnkoContext<Activity>): View { return with(ui) { verticalLayout { toolbar { id = R.id.toolbar backgroundResource = R.color.colorPrimary lparams(width = matchParent, height = wrapContent) } loadingIndicator = smoothProgressBar { lparams(width = matchParent, height = wrapContent) isIndeterminate = true // The below are a workaround for the fact that setting indeterminate to true // auto-starts the progress bar, which is not what is wanted visibility = View.INVISIBLE progressiveStop() } verticalLayout { padding = dip(16) recipeTitle = textView { setTextAppearance(R.style.recipe_name) } } } } } fun showLoading(show: Boolean) { if (show) { loadingIndicator?.visibility = View.VISIBLE loadingIndicator?.progressiveStart() } else { loadingIndicator?.progressiveStop() } } fun showTitle(title : String) { recipeTitle?.setText(title) } }
package net.ericschrag.takotlin.view import android.app.Activity import android.view.View import android.widget.TextView import fr.castorflex.android.smoothprogressbar.SmoothProgressBar import net.ericschrag.takotlin.R import org.jetbrains.anko.* import org.jetbrains.anko.appcompat.v7.toolbar class RecipeView : AnkoComponent<Activity> { var loadingIndicator: SmoothProgressBar? = null var recipeTitle: TextView? = null var indicatorRunning: Boolean = false override fun createView(ui: AnkoContext<Activity>): View { return with(ui) { verticalLayout { toolbar { id = R.id.toolbar backgroundResource = R.color.colorPrimary lparams(width = matchParent, height = wrapContent) } loadingIndicator = smoothProgressBar { lparams(width = matchParent, height = wrapContent) isIndeterminate = true // The below are a workaround for the fact that setting indeterminate to true // auto-starts the progress bar, which is not what is wanted visibility = View.INVISIBLE progressiveStop() } verticalLayout { padding = dip(16) recipeTitle = textView { setTextAppearance(R.style.recipe_name) } } } } } fun showLoading(show: Boolean) { if (show) { loadingIndicator?.visibility = View.VISIBLE if (!indicatorRunning) { indicatorRunning = true loadingIndicator?.progressiveStart() } } else { if (indicatorRunning) { loadingIndicator?.progressiveStop() indicatorRunning = false } } } fun showTitle(title: String) { recipeTitle?.setText(title) } }
n_added: 11
n_removed: 3
n_hunks: 3
change_kind: mixed
--- a/app/src/main/java/net/ericschrag/takotlin/view/RecipeView.kt +++ b/app/src/main/java/net/ericschrag/takotlin/view/RecipeView.kt @@ -13,2 +13,4 @@ var recipeTitle: TextView? = null + + var indicatorRunning: Boolean = false @@ -43,5 +45,11 @@ loadingIndicator?.visibility = View.VISIBLE - loadingIndicator?.progressiveStart() + if (!indicatorRunning) { + indicatorRunning = true + loadingIndicator?.progressiveStart() + } } else { - loadingIndicator?.progressiveStop() + if (indicatorRunning) { + loadingIndicator?.progressiveStop() + indicatorRunning = false + } } @@ -49,3 +57,3 @@ - fun showTitle(title : String) { + fun showTitle(title: String) { recipeTitle?.setText(title)
--- a/app/src/main/java/net/ericschrag/takotlin/view/RecipeView.kt +++ b/app/src/main/java/net/ericschrag/takotlin/view/RecipeView.kt @@ ... @@ var recipeTitle: TextView? = null + + var indicatorRunning: Boolean = false @@ ... @@ loadingIndicator?.visibility = View.VISIBLE - loadingIndicator?.progressiveStart() + if (!indicatorRunning) { + indicatorRunning = true + loadingIndicator?.progressiveStart() + } } else { - loadingIndicator?.progressiveStop() + if (indicatorRunning) { + loadingIndicator?.progressiveStop() + indicatorRunning = false + } } @@ ... @@ - fun showTitle(title : String) { + fun showTitle(title: String) { recipeTitle?.setText(title)
--- a/app/src/main/java/net/ericschrag/takotlin/view/RecipeView.kt +++ b/app/src/main/java/net/ericschrag/takotlin/view/RecipeView.kt @@ -13,2 +13,4 @@ CON var recipeTitle: TextView? = null ADD ADD var indicatorRunning: Boolean = false CON @@ -43,5 +45,11 @@ CON loadingIndicator?.visibility = View.VISIBLE DEL loadingIndicator?.progressiveStart() ADD if (!indicatorRunning) { ADD indicatorRunning = true ADD loadingIndicator?.progressiveStart() ADD } CON } else { DEL loadingIndicator?.progressiveStop() ADD if (indicatorRunning) { ADD loadingIndicator?.progressiveStop() ADD indicatorRunning = false ADD } CON } @@ -49,3 +57,3 @@ CON DEL fun showTitle(title : String) { ADD fun showTitle(title: String) { CON recipeTitle?.setText(title)
<<<<<<< SEARCH var loadingIndicator: SmoothProgressBar? = null var recipeTitle: TextView? = null override fun createView(ui: AnkoContext<Activity>): View { ======= var loadingIndicator: SmoothProgressBar? = null var recipeTitle: TextView? = null var indicatorRunning: Boolean = false override fun createView(ui: AnkoContext<Activity>): View { >>>>>>> REPLACE <<<<<<< SEARCH if (show) { loadingIndicator?.visibility = View.VISIBLE loadingIndicator?.progressiveStart() } else { loadingIndicator?.progressiveStop() } } fun showTitle(title : String) { recipeTitle?.setText(title) } ======= if (show) { loadingIndicator?.visibility = View.VISIBLE if (!indicatorRunning) { indicatorRunning = true loadingIndicator?.progressiveStart() } } else { if (indicatorRunning) { loadingIndicator?.progressiveStop() indicatorRunning = false } } } fun showTitle(title: String) { recipeTitle?.setText(title) } >>>>>>> REPLACE
repo: mikegehard/user-management-evolution-kotlin
commit: d9e7a79a692dd7e4a01a65d740c40d765e7f21ce
path: applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt
lang: kotlin
license: mit
message: Add note about using delegation
package com.example.billing.reocurringPayments import com.example.payments.Gateway import org.springframework.boot.actuate.metrics.CounterService import org.springframework.http.HttpHeaders import org.springframework.http.HttpStatus import org.springframework.http.MediaType import org.springframework.http.ResponseEntity import org.springframework.web.bind.annotation.RequestBody import org.springframework.web.bind.annotation.RequestMapping import org.springframework.web.bind.annotation.RequestMethod import org.springframework.web.bind.annotation.RestController import javax.inject.Inject @RestController class Controller @Inject constructor(val paymentGateway: Gateway, val counter: CounterService, val service: Service) { @RequestMapping(value = "/reocurringPayment", method = arrayOf(RequestMethod.POST)) fun createReocurringPayment(@RequestBody data: Map<String, Any>): ResponseEntity<String> { val responseHeaders = HttpHeaders().apply { add("content-type", MediaType.APPLICATION_JSON_VALUE) } service.thisMayFail() val createSuccessful = paymentGateway.createReocurringPayment(data["amount"] as Int) val response = if (createSuccessful) { counter.increment("billing.reocurringPayment.created") ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED) } else { ResponseEntity("{\"errors\": [\"error1\", \"error2\"]}", responseHeaders, HttpStatus.BAD_REQUEST) } return response } }
package com.example.billing.reocurringPayments import com.example.payments.Gateway import org.springframework.boot.actuate.metrics.CounterService import org.springframework.http.HttpHeaders import org.springframework.http.HttpStatus import org.springframework.http.MediaType import org.springframework.http.ResponseEntity import org.springframework.web.bind.annotation.RequestBody import org.springframework.web.bind.annotation.RequestMapping import org.springframework.web.bind.annotation.RequestMethod import org.springframework.web.bind.annotation.RestController import javax.inject.Inject // Can you use delegation here??? // https://youtu.be/mSg9kSYfeX0?t=22m35s @RestController class Controller @Inject constructor(val paymentGateway: Gateway, val counter: CounterService, val service: Service) { @RequestMapping(value = "/reocurringPayment", method = arrayOf(RequestMethod.POST)) fun createReocurringPayment(@RequestBody data: Map<String, Any>): ResponseEntity<String> { val responseHeaders = HttpHeaders().apply { add("content-type", MediaType.APPLICATION_JSON_VALUE) } service.thisMayFail() val createSuccessful = paymentGateway.createReocurringPayment(data["amount"] as Int) val response = if (createSuccessful) { counter.increment("billing.reocurringPayment.created") ResponseEntity("{\"errors\": []}", responseHeaders, HttpStatus.CREATED) } else { ResponseEntity("{\"errors\": [\"error1\", \"error2\"]}", responseHeaders, HttpStatus.BAD_REQUEST) } return response } }
n_added: 3
n_removed: 0
n_hunks: 1
change_kind: add_only
--- a/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt +++ b/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt @@ -14,2 +14,5 @@ + +// Can you use delegation here??? +// https://youtu.be/mSg9kSYfeX0?t=22m35s @RestController
--- a/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt +++ b/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt @@ ... @@ + +// Can you use delegation here??? +// https://youtu.be/mSg9kSYfeX0?t=22m35s @RestController
--- a/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt +++ b/applications/billing/src/main/kotlin/com/example/billing/reocurringPayments/Controller.kt @@ -14,2 +14,5 @@ CON ADD ADD // Can you use delegation here??? ADD // https://youtu.be/mSg9kSYfeX0?t=22m35s CON @RestController
<<<<<<< SEARCH import javax.inject.Inject @RestController class Controller @Inject constructor(val paymentGateway: Gateway, val counter: CounterService, val service: Service) { ======= import javax.inject.Inject // Can you use delegation here??? // https://youtu.be/mSg9kSYfeX0?t=22m35s @RestController class Controller @Inject constructor(val paymentGateway: Gateway, val counter: CounterService, val service: Service) { >>>>>>> REPLACE
repo: lises/sheldon
commit: 2a0b1d070996bfb3d950d4fae70b264ddabc7d2f
path: sheldon/config.py
lang: python
license: mit
message: Add function for getting installed plugins
# -*- coding: utf-8 -*- """ @author: Seva Zhidkov @contact: [email protected] @license: The MIT license Copyright (C) 2015 """ import os class Config: def __init__(self, prefix='SHELDON_'): """ Load config from environment variables. :param prefix: string, all needed environment variables starts from it. Default - 'SHELDON_'. So, environment variables will be looking like: 'SHELDON_BOT_NAME', 'SHELDON_TWITTER_KEY' :return: """ # Bot config variables self.variables = {} for variable in os.environ: if variable.startswith(prefix): self.variables[variable] = os.environ[variable] def get(self, variable, default_value): """ :param variable: string, needed variable :param default_value: string, value that returns if variable is not set :return: """ if variable not in self.variables: return default_value return self.variables[variable]
# -*- coding: utf-8 -*- """ @author: Seva Zhidkov @contact: [email protected] @license: The MIT license Copyright (C) 2015 """ import os class Config: def __init__(self, prefix='SHELDON_'): """ Load config from environment variables. :param prefix: string, all needed environment variables starts from it. Default - 'SHELDON_'. So, environment variables will be looking like: 'SHELDON_BOT_NAME', 'SHELDON_TWITTER_KEY' :return: """ # Bot config variables self.variables = {} for variable in os.environ: if variable.startswith(prefix): self.variables[variable] = os.environ[variable] def get(self, variable, default_value): """ Get variable value from environment :param variable: string, needed variable :param default_value: string, value that returns if variable is not set :return: variable value """ if variable not in self.variables: return default_value return self.variables[variable] def get_installed_plugins(self): """ Return list of installed plugins from installed_plugins.txt :return: list of strings with names of plugins """ plugins_file = open('installed_plugins.txt') return plugins_file.readlines()
n_added: 10
n_removed: 1
n_hunks: 3
change_kind: mixed
--- a/sheldon/config.py +++ b/sheldon/config.py @@ -34,2 +34,3 @@ """ + Get variable value from environment @@ -38,3 +39,3 @@ variable is not set - :return: + :return: variable value """ @@ -45 +46,9 @@ + def get_installed_plugins(self): + """ + Return list of installed plugins from installed_plugins.txt + :return: list of strings with names of plugins + """ + plugins_file = open('installed_plugins.txt') + return plugins_file.readlines() +
--- a/sheldon/config.py +++ b/sheldon/config.py @@ ... @@ """ + Get variable value from environment @@ ... @@ variable is not set - :return: + :return: variable value """ @@ ... @@ + def get_installed_plugins(self): + """ + Return list of installed plugins from installed_plugins.txt + :return: list of strings with names of plugins + """ + plugins_file = open('installed_plugins.txt') + return plugins_file.readlines() +
--- a/sheldon/config.py +++ b/sheldon/config.py @@ -34,2 +34,3 @@ CON """ ADD Get variable value from environment CON @@ -38,3 +39,3 @@ CON variable is not set DEL :return: ADD :return: variable value CON """ @@ -45 +46,9 @@ CON ADD def get_installed_plugins(self): ADD """ ADD Return list of installed plugins from installed_plugins.txt ADD :return: list of strings with names of plugins ADD """ ADD plugins_file = open('installed_plugins.txt') ADD return plugins_file.readlines() ADD
<<<<<<< SEARCH def get(self, variable, default_value): """ :param variable: string, needed variable :param default_value: string, value that returns if variable is not set :return: """ if variable not in self.variables: ======= def get(self, variable, default_value): """ Get variable value from environment :param variable: string, needed variable :param default_value: string, value that returns if variable is not set :return: variable value """ if variable not in self.variables: >>>>>>> REPLACE <<<<<<< SEARCH return self.variables[variable] ======= return self.variables[variable] def get_installed_plugins(self): """ Return list of installed plugins from installed_plugins.txt :return: list of strings with names of plugins """ plugins_file = open('installed_plugins.txt') return plugins_file.readlines() >>>>>>> REPLACE
repo: proxer/ProxerLibJava
commit: e3a67601b22ac76b71ae3883355b05310c35a52f
path: library/src/main/kotlin/me/proxer/library/enums/Medium.kt
lang: kotlin
license: mit
message: Add support for web novels
package me.proxer.library.enums import com.squareup.moshi.Json import com.squareup.moshi.JsonClass /** * Enum holding the available mediums. * * @author Ruben Gees */ @JsonClass(generateAdapter = false) enum class Medium { @Json(name = "animeseries") ANIMESERIES, @Json(name = "movie") MOVIE, @Json(name = "ova") OVA, @Json(name = "hentai") HENTAI, @Json(name = "mangaseries") MANGASERIES, @Json(name = "lightnovel") LIGHTNOVEL, @Json(name = "oneshot") ONESHOT, @Json(name = "doujin") DOUJIN, @Json(name = "hmanga") HMANGA, @Json(name = "") OTHER }
package me.proxer.library.enums import com.squareup.moshi.Json import com.squareup.moshi.JsonClass /** * Enum holding the available mediums. * * @author Ruben Gees */ @JsonClass(generateAdapter = false) enum class Medium { @Json(name = "animeseries") ANIMESERIES, @Json(name = "movie") MOVIE, @Json(name = "ova") OVA, @Json(name = "hentai") HENTAI, @Json(name = "mangaseries") MANGASERIES, @Json(name = "lightnovel") LIGHTNOVEL, @Json(name = "webnovel") WEBNOVEL, @Json(name = "oneshot") ONESHOT, @Json(name = "doujin") DOUJIN, @Json(name = "hmanga") HMANGA, @Json(name = "") OTHER }
n_added: 3
n_removed: 0
n_hunks: 1
change_kind: add_only
--- a/library/src/main/kotlin/me/proxer/library/enums/Medium.kt +++ b/library/src/main/kotlin/me/proxer/library/enums/Medium.kt @@ -31,2 +31,5 @@ + @Json(name = "webnovel") + WEBNOVEL, + @Json(name = "oneshot")
--- a/library/src/main/kotlin/me/proxer/library/enums/Medium.kt +++ b/library/src/main/kotlin/me/proxer/library/enums/Medium.kt @@ ... @@ + @Json(name = "webnovel") + WEBNOVEL, + @Json(name = "oneshot")
--- a/library/src/main/kotlin/me/proxer/library/enums/Medium.kt +++ b/library/src/main/kotlin/me/proxer/library/enums/Medium.kt @@ -31,2 +31,5 @@ CON ADD @Json(name = "webnovel") ADD WEBNOVEL, ADD CON @Json(name = "oneshot")
<<<<<<< SEARCH LIGHTNOVEL, @Json(name = "oneshot") ONESHOT, ======= LIGHTNOVEL, @Json(name = "webnovel") WEBNOVEL, @Json(name = "oneshot") ONESHOT, >>>>>>> REPLACE
repo: jampekka/openhilma
commit: 5fade4bc26c2637a479a69051cee37a1a859c71a
path: load_hilma.py
lang: python
license: agpl-3.0
message: Use the notice ID as priary key Gentlemen, drop your DBs!
#!/usr/bin/env python3 import xml.etree.ElementTree as ET import sys import pymongo from pathlib import Path import argh from xml2json import etree_to_dict from hilma_conversion import get_handler hilma_to_dict = lambda notice: etree_to_dict(notice, get_handler) def load_hilma_xml(inputfile, collection): root = ET.parse(inputfile).getroot() notices = list(root.iterfind('WRAPPED_NOTICE')) notices = map(hilma_to_dict, notices) collection.ensure_index('ID', unique=True) for n in notices: # Use the ID as primary key n.update('_id', n['ID']) collection.save(n) def sync_hilma_xml_directory(directory, mongo_uri=None, mongo_db='openhilma'): if mongo_uri is None: client = pymongo.MongoClient() else: client = pymongo.MongoClient(mongo_uri) db = client[mongo_db] collection = db.notices paths = sorted(Path(directory).glob("*.xml")) for fpath in paths: load_hilma_xml(fpath.open(), collection) if __name__ == '__main__': argh.dispatch_command(sync_hilma_xml_directory)
#!/usr/bin/env python3 import xml.etree.ElementTree as ET import sys import pymongo from pathlib import Path import argh from xml2json import etree_to_dict from hilma_conversion import get_handler hilma_to_dict = lambda notice: etree_to_dict(notice, get_handler) def load_hilma_xml(inputfile, collection): root = ET.parse(inputfile).getroot() notices = list(root.iterfind('WRAPPED_NOTICE')) notices = map(hilma_to_dict, notices) for n in notices: # Use the ID as primary key n.update({'_id': n['ID']}) collection.save(n) def sync_hilma_xml_directory(directory, mongo_uri=None, mongo_db='openhilma'): if mongo_uri is None: client = pymongo.MongoClient() else: client = pymongo.MongoClient(mongo_uri) db = client[mongo_db] collection = db.notices paths = sorted(Path(directory).glob("*.xml")) for fpath in paths: load_hilma_xml(fpath.open(), collection) if __name__ == '__main__': argh.dispatch_command(sync_hilma_xml_directory)
1
3
1
mixed
--- a/load_hilma.py +++ b/load_hilma.py @@ -20,7 +20,5 @@ - collection.ensure_index('ID', unique=True) - for n in notices: # Use the ID as primary key - n.update('_id', n['ID']) + n.update({'_id': n['ID']}) collection.save(n)
--- a/load_hilma.py +++ b/load_hilma.py @@ ... @@ - collection.ensure_index('ID', unique=True) - for n in notices: # Use the ID as primary key - n.update('_id', n['ID']) + n.update({'_id': n['ID']}) collection.save(n)
--- a/load_hilma.py +++ b/load_hilma.py @@ -20,7 +20,5 @@ CON DEL collection.ensure_index('ID', unique=True) DEL CON for n in notices: CON # Use the ID as primary key DEL n.update('_id', n['ID']) ADD n.update({'_id': n['ID']}) CON collection.save(n)
<<<<<<< SEARCH notices = map(hilma_to_dict, notices) collection.ensure_index('ID', unique=True) for n in notices: # Use the ID as primary key n.update('_id', n['ID']) collection.save(n) ======= notices = map(hilma_to_dict, notices) for n in notices: # Use the ID as primary key n.update({'_id': n['ID']}) collection.save(n) >>>>>>> REPLACE
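The openhilma record above promotes each notice's ID field to MongoDB's _id so that re-importing the same XML upserts rather than duplicates. A minimal sketch of that idea, independent of the repository's own helpers: it assumes a local mongod and an installed pymongo, uses replace_one(..., upsert=True) in place of the deprecated collection.save() seen in the snippet, and the database, collection, and document contents are invented for illustration.

import pymongo


def save_with_natural_key(collection, doc, key='ID'):
    """Copy the document's natural key into _id, then upsert it.

    Re-running an import with the same IDs overwrites the existing
    documents instead of inserting duplicates.
    """
    doc['_id'] = doc[key]
    collection.replace_one({'_id': doc['_id']}, doc, upsert=True)


if __name__ == '__main__':
    client = pymongo.MongoClient()              # assumes mongod on localhost
    notices = client['openhilma_demo'].notices  # hypothetical database name
    save_with_natural_key(notices, {'ID': '2015-000123', 'TITLE': 'Example'})

Using the upstream identifier as _id makes repeated loads idempotent, at the cost that the identifier must be unique and stable.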
ejwaibel/squarrels
571528880983a800fe4b9a87dcc132d6209ae779
src/app/components/card/card.controller.js
javascript
unlicense
Set card to 'disabled' if player isn't 'active'
export default class CardController { constructor($rootScope, $scope, $log, _, playerModel, cardsApi) { 'ngInject'; this.$rootScope = $rootScope; this.$scope = $scope; this.$log = $log; this._ = _; this.cardsApi = cardsApi; this.playerModel = playerModel.model; this.$log.info('constructor()', this); } $onInit() { let onSuccess = (res => { this.$log.info('onSuccess()', res, this); if (res.status === 200) { this.$scope.cardData = res.data[0]; } }), onError = (err => { this.$log.error(err); }); this.$log.info('$onInit()', this); this.$scope.cardData = {}; this.$scope.isDisabled = !this.cardId || this.cardType === 'storage'; if (this.cardId) { this.cardsApi .get(this.cardId) .then(onSuccess, onError); } } $onDestroy() { return () => { this.$log.info('$onDestroy()', this); }; } onClick($e) { this.$log.info('onClick()', this); $e.preventDefault(); } };
export default class CardController { constructor($rootScope, $scope, $log, _, playerModel, cardsApi) { 'ngInject'; this.$rootScope = $rootScope; this.$scope = $scope; this.$log = $log; this._ = _; this.cardsApi = cardsApi; this.playerModel = playerModel.model; this.$log.info('constructor()', this); } $onInit() { let onSuccess = (res => { this.$log.info('onSuccess()', res, this); if (res.status === 200) { this.$scope.cardData = res.data[0]; } }), onError = (err => { this.$log.error(err); }); this.$log.info('$onInit()', this); this.$scope.cardData = {}; this.$scope.isDisabled = !this.cardId || this.cardType === 'storage' || !this.player.isActive; if (this.cardId) { this.cardsApi .get(this.cardId) .then(onSuccess, onError); } } $onDestroy() { return () => { this.$log.info('$onDestroy()', this); }; } onClick($e) { this.$log.info('onClick()', this); $e.preventDefault(); } };
1
1
1
mixed
--- a/src/app/components/card/card.controller.js +++ b/src/app/components/card/card.controller.js @@ -30,3 +30,3 @@ this.$scope.cardData = {}; - this.$scope.isDisabled = !this.cardId || this.cardType === 'storage'; + this.$scope.isDisabled = !this.cardId || this.cardType === 'storage' || !this.player.isActive;
--- a/src/app/components/card/card.controller.js +++ b/src/app/components/card/card.controller.js @@ ... @@ this.$scope.cardData = {}; - this.$scope.isDisabled = !this.cardId || this.cardType === 'storage'; + this.$scope.isDisabled = !this.cardId || this.cardType === 'storage' || !this.player.isActive;
--- a/src/app/components/card/card.controller.js +++ b/src/app/components/card/card.controller.js @@ -30,3 +30,3 @@ CON this.$scope.cardData = {}; DEL this.$scope.isDisabled = !this.cardId || this.cardType === 'storage'; ADD this.$scope.isDisabled = !this.cardId || this.cardType === 'storage' || !this.player.isActive; CON
<<<<<<< SEARCH this.$scope.cardData = {}; this.$scope.isDisabled = !this.cardId || this.cardType === 'storage'; if (this.cardId) { ======= this.$scope.cardData = {}; this.$scope.isDisabled = !this.cardId || this.cardType === 'storage' || !this.player.isActive; if (this.cardId) { >>>>>>> REPLACE
pnavarrc/chirp-server
145ba38103c9e179dde639c2b9131be8c4c33468
01-twitter-sample.js
javascript
mit
Update the twitter sample example
// Twitter Sample // // Using the Twitter Streaming API. In this example, we retrieve the // sample of the statuses in real time. // Import node modules var Twit = require('twit'), // Twitter API Client config = require('./config.json'); // Twitter Credentials // Configure the Twit object with the application credentials var T = new Twit(config); // Subscribe to the stream sample, for tweets in english var stream = T.stream('statuses/sample'); // The callback will be invoked on each tweet. Here, we print the username // and the text of the tweet in the screen. stream.on('tweet', function(tweet) { console.log('[@' + tweet.user.screen_name + ']: ' + tweet.text); }); // The 'connect' callback is invoked when the Twitter API Client // tries to connect to Twitter. stream.on('connect', function(msg) { console.log('connect'); }); // The 'connected' event is triggered when the connection is successful. stream.on('connected', function(msg) { console.log('connected'); }); // The 'warning' event is triggered if the client is not processing the // tweets fast enough. stream.on('warning', function(msg) { console.warning('warning') }); // The 'disconnect' event is triggered when a disconnect message comes from // Twitter. stream.on('disconnect', function(msg) { console.log('disconnect'); });
// Twitter Sample // // Using the Twitter Streaming API. In this example, we retrieve the // sample of the statuses in real time. // Import node modules var Twit = require('twit'), // Twitter API Client config = require('./config.json'); // Twitter Credentials // Configure the Twit object with the application credentials var T = new Twit(config); // Subscribe to the sample stream and begin listening var stream = T.stream('statuses/sample'); // The callback will be invoked on each tweet. Here, we print the username // and the text of the tweet in the screen. stream.on('tweet', function(tweet) { console.log('[@' + tweet.user.screen_name + ']: ' + tweet.text); }); // The 'connect' callback is invoked when the Twitter API Client // tries to connect to Twitter. stream.on('connect', function(msg) { console.log('connect'); }); // The 'connected' event is triggered when the connection is successful. stream.on('connected', function(msg) { console.log('connected'); }); // The 'reconnect' event is triggered when a reconnection is scheduled. stream.on('reconnect', function(req, res, interval) { console.log('Reconnecting in ' + (interval / 3) + ' seconds.'); }); // The 'warning' event is triggered if the client is not processing the // tweets fast enough. stream.on('warning', function(msg) { console.warning('warning') }); // The 'disconnect' event is triggered when a disconnect message comes from // Twitter. stream.on('disconnect', function(msg) { console.log('disconnect'); });
6
1
2
mixed
--- a/01-twitter-sample.js +++ b/01-twitter-sample.js @@ -12,3 +12,3 @@ -// Subscribe to the stream sample, for tweets in english +// Subscribe to the sample stream and begin listening var stream = T.stream('statuses/sample'); @@ -32,2 +32,7 @@ +// The 'reconnect' event is triggered when a reconnection is scheduled. +stream.on('reconnect', function(req, res, interval) { + console.log('Reconnecting in ' + (interval / 3) + ' seconds.'); +}); + // The 'warning' event is triggered if the client is not processing the
--- a/01-twitter-sample.js +++ b/01-twitter-sample.js @@ ... @@ -// Subscribe to the stream sample, for tweets in english +// Subscribe to the sample stream and begin listening var stream = T.stream('statuses/sample'); @@ ... @@ +// The 'reconnect' event is triggered when a reconnection is scheduled. +stream.on('reconnect', function(req, res, interval) { + console.log('Reconnecting in ' + (interval / 3) + ' seconds.'); +}); + // The 'warning' event is triggered if the client is not processing the
--- a/01-twitter-sample.js +++ b/01-twitter-sample.js @@ -12,3 +12,3 @@ CON DEL // Subscribe to the stream sample, for tweets in english ADD // Subscribe to the sample stream and begin listening CON var stream = T.stream('statuses/sample'); @@ -32,2 +32,7 @@ CON ADD // The 'reconnect' event is triggered when a reconnection is scheduled. ADD stream.on('reconnect', function(req, res, interval) { ADD console.log('Reconnecting in ' + (interval / 3) + ' seconds.'); ADD }); ADD CON // The 'warning' event is triggered if the client is not processing the
<<<<<<< SEARCH var T = new Twit(config); // Subscribe to the stream sample, for tweets in english var stream = T.stream('statuses/sample'); ======= var T = new Twit(config); // Subscribe to the sample stream and begin listening var stream = T.stream('statuses/sample'); >>>>>>> REPLACE <<<<<<< SEARCH }); // The 'warning' event is triggered if the client is not processing the // tweets fast enough. ======= }); // The 'reconnect' event is triggered when a reconnection is scheduled. stream.on('reconnect', function(req, res, interval) { console.log('Reconnecting in ' + (interval / 3) + ' seconds.'); }); // The 'warning' event is triggered if the client is not processing the // tweets fast enough. >>>>>>> REPLACE
simonyiszk/mvk-web
ee3a817f275e3bd12e4418ca8db00d852b69f775
gatsby-node.js
javascript
mit
Fix broken locale copying mechanism
const fs = require('fs-extra'); const path = require('path'); const { createFilePath } = require('gatsby-source-filesystem'); exports.onCreateNode = ({ node, getNode, boundActionCreators }) => { const { createNodeField } = boundActionCreators; if (node.internal.type === 'MarkdownRemark') { const slug = createFilePath({ node, getNode, basePath: 'pages' }); createNodeField({ node, name: 'slug', value: slug, }); } }; exports.onPostBootstrap = () => { console.log('Copying locales'); // eslint-disable-line no-console fs.copySync(path.join(__dirname, '/src/locales'), path.join(__dirname, '/public/locales')); }; exports.createPages = ({ graphql, boundActionCreators }) => { const { createPage } = boundActionCreators; return new Promise((resolve) => { graphql(` { allMarkdownRemark { edges { node { fields { slug } } } } } `).then(({ data }) => { data.allMarkdownRemark.edges.forEach(({ node }) => { createPage({ path: node.fields.slug, component: path.resolve('./src/templates/blog-post.jsx'), context: { // Data passed to context is available in page queries as GraphQL variables slug: node.fields.slug, }, }); }); resolve(); }); }); };
const fs = require('fs-extra'); const path = require('path'); const { createFilePath } = require('gatsby-source-filesystem'); exports.onCreateNode = ({ node, getNode, boundActionCreators }) => { const { createNodeField } = boundActionCreators; if (node.internal.type === 'MarkdownRemark') { const slug = createFilePath({ node, getNode, basePath: 'pages' }); createNodeField({ node, name: 'slug', value: slug, }); } }; exports.onPreBootstrap = () => { console.log('Copying locales'); // eslint-disable-line no-console fs.copySync(path.join(__dirname, '/src/locales'), path.join(__dirname, '/public/locales')); }; exports.createPages = ({ graphql, boundActionCreators }) => { const { createPage } = boundActionCreators; return new Promise((resolve) => { graphql(` { allMarkdownRemark { edges { node { fields { slug } } } } } `).then(({ data }) => { data.allMarkdownRemark.edges.forEach(({ node }) => { createPage({ path: node.fields.slug, component: path.resolve('./src/templates/blog-post.jsx'), context: { // Data passed to context is available in page queries as GraphQL variables slug: node.fields.slug, }, }); }); resolve(); }); }); };
1
1
1
mixed
--- a/gatsby-node.js +++ b/gatsby-node.js @@ -17,3 +17,3 @@ -exports.onPostBootstrap = () => { +exports.onPreBootstrap = () => { console.log('Copying locales'); // eslint-disable-line no-console
--- a/gatsby-node.js +++ b/gatsby-node.js @@ ... @@ -exports.onPostBootstrap = () => { +exports.onPreBootstrap = () => { console.log('Copying locales'); // eslint-disable-line no-console
--- a/gatsby-node.js +++ b/gatsby-node.js @@ -17,3 +17,3 @@ CON DEL exports.onPostBootstrap = () => { ADD exports.onPreBootstrap = () => { CON console.log('Copying locales'); // eslint-disable-line no-console
<<<<<<< SEARCH }; exports.onPostBootstrap = () => { console.log('Copying locales'); // eslint-disable-line no-console fs.copySync(path.join(__dirname, '/src/locales'), path.join(__dirname, '/public/locales')); ======= }; exports.onPreBootstrap = () => { console.log('Copying locales'); // eslint-disable-line no-console fs.copySync(path.join(__dirname, '/src/locales'), path.join(__dirname, '/public/locales')); >>>>>>> REPLACE
radremedy/radremedy
3940fd8b58b6a21627ef0ff62f7480593e5108eb
remedy/radremedy.py
python
mpl-2.0
Move imports around and avoid shadowing app
#!/usr/bin/env python """ radremedy.py Main web application file. Contains initial setup of database, API, and other components. Also contains the setup of the routes. """ from flask import Flask, url_for, request, abort from flask.ext.script import Manager from flask.ext.migrate import Migrate, MigrateCommand from rad.models import db, Resource def create_app(config, models=()): from remedyblueprint import remedy, url_for_other_page app = Flask(__name__) app.config.from_object(config) app.register_blueprint(remedy) # searching configurations app.jinja_env.trim_blocks = True # Register the paging helper method with Jinja2 app.jinja_env.globals['url_for_other_page'] = url_for_other_page db.init_app(app) Migrate(app, db, directory=app.config['MIGRATIONS_DIR']) manager = Manager(app) manager.add_command('db', MigrateCommand) # turning API off for now # from api_manager import init_api_manager # api_manager = init_api_manager(app, db) # map(lambda m: api_manager.create_api(m), models) return app, manager if __name__ == '__main__': app, manager = create_app('config.BaseConfig', (Resource, )) with app.app_context(): manager.run()
#!/usr/bin/env python """ radremedy.py Main web application file. Contains initial setup of database, API, and other components. Also contains the setup of the routes. """ from flask import Flask, url_for, request, abort from flask.ext.script import Manager from flask.ext.migrate import Migrate, MigrateCommand from flask.ext.login import current_user from rad.models import db, Resource def create_app(config, models=()): app = Flask(__name__) app.config.from_object(config) from remedyblueprint import remedy, url_for_other_page app.register_blueprint(remedy) from auth.user_auth import auth, login_manager app.register_blueprint(auth) login_manager.init_app(app) # searching configurations app.jinja_env.trim_blocks = True # Register the paging helper method with Jinja2 app.jinja_env.globals['url_for_other_page'] = url_for_other_page app.jinja_env.globals['logged_in'] = lambda : not current_user.is_anonymous() db.init_app(app) Migrate(app, db, directory=app.config['MIGRATIONS_DIR']) manager = Manager(app) manager.add_command('db', MigrateCommand) # turning API off for now # from api_manager import init_api_manager # api_manager = init_api_manager(app, db) # map(lambda m: api_manager.create_api(m), models) return app, manager if __name__ == '__main__': application, manager = create_app('config.BaseConfig', (Resource, )) with application.app_context(): manager.run()
9
4
5
mixed
--- a/remedy/radremedy.py +++ b/remedy/radremedy.py @@ -10,2 +10,3 @@ from flask.ext.migrate import Migrate, MigrateCommand +from flask.ext.login import current_user from rad.models import db, Resource @@ -15,4 +16,2 @@ - from remedyblueprint import remedy, url_for_other_page - app = Flask(__name__) @@ -20,3 +19,8 @@ + from remedyblueprint import remedy, url_for_other_page app.register_blueprint(remedy) + + from auth.user_auth import auth, login_manager + app.register_blueprint(auth) + login_manager.init_app(app) @@ -26,2 +30,3 @@ app.jinja_env.globals['url_for_other_page'] = url_for_other_page + app.jinja_env.globals['logged_in'] = lambda : not current_user.is_anonymous() @@ -42,5 +47,5 @@ if __name__ == '__main__': - app, manager = create_app('config.BaseConfig', (Resource, )) + application, manager = create_app('config.BaseConfig', (Resource, )) - with app.app_context(): + with application.app_context(): manager.run()
--- a/remedy/radremedy.py +++ b/remedy/radremedy.py @@ ... @@ from flask.ext.migrate import Migrate, MigrateCommand +from flask.ext.login import current_user from rad.models import db, Resource @@ ... @@ - from remedyblueprint import remedy, url_for_other_page - app = Flask(__name__) @@ ... @@ + from remedyblueprint import remedy, url_for_other_page app.register_blueprint(remedy) + + from auth.user_auth import auth, login_manager + app.register_blueprint(auth) + login_manager.init_app(app) @@ ... @@ app.jinja_env.globals['url_for_other_page'] = url_for_other_page + app.jinja_env.globals['logged_in'] = lambda : not current_user.is_anonymous() @@ ... @@ if __name__ == '__main__': - app, manager = create_app('config.BaseConfig', (Resource, )) + application, manager = create_app('config.BaseConfig', (Resource, )) - with app.app_context(): + with application.app_context(): manager.run()
--- a/remedy/radremedy.py +++ b/remedy/radremedy.py @@ -10,2 +10,3 @@ CON from flask.ext.migrate import Migrate, MigrateCommand ADD from flask.ext.login import current_user CON from rad.models import db, Resource @@ -15,4 +16,2 @@ CON DEL from remedyblueprint import remedy, url_for_other_page DEL CON app = Flask(__name__) @@ -20,3 +19,8 @@ CON ADD from remedyblueprint import remedy, url_for_other_page CON app.register_blueprint(remedy) ADD ADD from auth.user_auth import auth, login_manager ADD app.register_blueprint(auth) ADD login_manager.init_app(app) CON @@ -26,2 +30,3 @@ CON app.jinja_env.globals['url_for_other_page'] = url_for_other_page ADD app.jinja_env.globals['logged_in'] = lambda : not current_user.is_anonymous() CON @@ -42,5 +47,5 @@ CON if __name__ == '__main__': DEL app, manager = create_app('config.BaseConfig', (Resource, )) ADD application, manager = create_app('config.BaseConfig', (Resource, )) CON DEL with app.app_context(): ADD with application.app_context(): CON manager.run()
<<<<<<< SEARCH from flask.ext.script import Manager from flask.ext.migrate import Migrate, MigrateCommand from rad.models import db, Resource def create_app(config, models=()): from remedyblueprint import remedy, url_for_other_page app = Flask(__name__) app.config.from_object(config) app.register_blueprint(remedy) # searching configurations app.jinja_env.trim_blocks = True # Register the paging helper method with Jinja2 app.jinja_env.globals['url_for_other_page'] = url_for_other_page db.init_app(app) ======= from flask.ext.script import Manager from flask.ext.migrate import Migrate, MigrateCommand from flask.ext.login import current_user from rad.models import db, Resource def create_app(config, models=()): app = Flask(__name__) app.config.from_object(config) from remedyblueprint import remedy, url_for_other_page app.register_blueprint(remedy) from auth.user_auth import auth, login_manager app.register_blueprint(auth) login_manager.init_app(app) # searching configurations app.jinja_env.trim_blocks = True # Register the paging helper method with Jinja2 app.jinja_env.globals['url_for_other_page'] = url_for_other_page app.jinja_env.globals['logged_in'] = lambda : not current_user.is_anonymous() db.init_app(app) >>>>>>> REPLACE <<<<<<< SEARCH if __name__ == '__main__': app, manager = create_app('config.BaseConfig', (Resource, )) with app.app_context(): manager.run() ======= if __name__ == '__main__': application, manager = create_app('config.BaseConfig', (Resource, )) with application.app_context(): manager.run() >>>>>>> REPLACE
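The radremedy record above reworks create_app() so blueprints are imported and registered inside the factory and the module-level variable is no longer named app. A stripped-down sketch of the same pattern using only plain Flask; the blueprint, route, and names below are invented for illustration, not the project's real modules.

from flask import Blueprint, Flask

pages = Blueprint('pages', __name__)        # hypothetical blueprint


@pages.route('/')
def index():
    return 'hello'


def create_app(config_object=None):
    app = Flask(__name__)
    if config_object is not None:
        app.config.from_object(config_object)
    # Register blueprints after the app object exists; in the real project
    # the imports also live here to avoid circular imports.
    app.register_blueprint(pages)
    return app


if __name__ == '__main__':
    application = create_app()              # distinct name, no shadowing
    with application.app_context():
        print(application.url_map)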
AcornUI/Acorn
a5894e53d694cffd7911e4383659af2cd6e4085b
acornui-utils/build.gradle.kts
kotlin
apache-2.0
Refactor acornui-utils to use 'basic' plugin
/* * Copyright 2019 PolyForest * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ plugins { kotlin("multiplatform") `maven-publish` } val KOTLIN_LANGUAGE_VERSION: String by extra val KOTLIN_JVM_TARGET: String by extra kotlin { js { compilations.all { kotlinOptions { moduleKind = "amd" sourceMap = true sourceMapEmbedSources = "always" main = "noCall" } } } jvm { compilations.all { kotlinOptions { jvmTarget = KOTLIN_JVM_TARGET } } } targets.all { compilations.all { kotlinOptions { languageVersion = KOTLIN_LANGUAGE_VERSION apiVersion = KOTLIN_LANGUAGE_VERSION verbose = true } } } sourceSets { commonMain { dependencies { implementation(kotlin("stdlib-common")) } } commonTest { dependencies { implementation(kotlin("test-common")) implementation(kotlin("test-annotations-common")) implementation(project(":acornui-test-utils")) } } named("jvmMain") { dependencies { implementation(kotlin("stdlib-jdk8")) } } named("jvmTest") { dependencies { implementation(kotlin("test")) implementation(kotlin("test-junit")) } } named("jsMain") { dependencies { implementation(kotlin("stdlib-js")) } } named("jsTest") { dependencies { implementation(kotlin("test-js")) } } } }
/* * Copyright 2019 PolyForest * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ plugins { id("com.polyforest.acornui.basic") `maven-publish` } kotlin { sourceSets { commonTest { dependencies { implementation(project(":acornui-test-utils")) } } } }
1
58
2
mixed
--- a/acornui-utils/build.gradle.kts +++ b/acornui-utils/build.gradle.kts @@ -17,3 +17,3 @@ plugins { - kotlin("multiplatform") + id("com.polyforest.acornui.basic") `maven-publish` @@ -21,64 +21,7 @@ -val KOTLIN_LANGUAGE_VERSION: String by extra -val KOTLIN_JVM_TARGET: String by extra kotlin { - js { - compilations.all { - kotlinOptions { - moduleKind = "amd" - sourceMap = true - sourceMapEmbedSources = "always" - main = "noCall" - } - } - } - jvm { - compilations.all { - kotlinOptions { - jvmTarget = KOTLIN_JVM_TARGET - } - } - } - targets.all { - compilations.all { - kotlinOptions { - languageVersion = KOTLIN_LANGUAGE_VERSION - apiVersion = KOTLIN_LANGUAGE_VERSION - verbose = true - } - } - } - sourceSets { - commonMain { - dependencies { - implementation(kotlin("stdlib-common")) - } - } commonTest { dependencies { - implementation(kotlin("test-common")) - implementation(kotlin("test-annotations-common")) implementation(project(":acornui-test-utils")) - } - } - named("jvmMain") { - dependencies { - implementation(kotlin("stdlib-jdk8")) - } - } - named("jvmTest") { - dependencies { - implementation(kotlin("test")) - implementation(kotlin("test-junit")) - } - } - named("jsMain") { - dependencies { - implementation(kotlin("stdlib-js")) - } - } - named("jsTest") { - dependencies { - implementation(kotlin("test-js")) }
--- a/acornui-utils/build.gradle.kts +++ b/acornui-utils/build.gradle.kts @@ ... @@ plugins { - kotlin("multiplatform") + id("com.polyforest.acornui.basic") `maven-publish` @@ ... @@ -val KOTLIN_LANGUAGE_VERSION: String by extra -val KOTLIN_JVM_TARGET: String by extra kotlin { - js { - compilations.all { - kotlinOptions { - moduleKind = "amd" - sourceMap = true - sourceMapEmbedSources = "always" - main = "noCall" - } - } - } - jvm { - compilations.all { - kotlinOptions { - jvmTarget = KOTLIN_JVM_TARGET - } - } - } - targets.all { - compilations.all { - kotlinOptions { - languageVersion = KOTLIN_LANGUAGE_VERSION - apiVersion = KOTLIN_LANGUAGE_VERSION - verbose = true - } - } - } - sourceSets { - commonMain { - dependencies { - implementation(kotlin("stdlib-common")) - } - } commonTest { dependencies { - implementation(kotlin("test-common")) - implementation(kotlin("test-annotations-common")) implementation(project(":acornui-test-utils")) - } - } - named("jvmMain") { - dependencies { - implementation(kotlin("stdlib-jdk8")) - } - } - named("jvmTest") { - dependencies { - implementation(kotlin("test")) - implementation(kotlin("test-junit")) - } - } - named("jsMain") { - dependencies { - implementation(kotlin("stdlib-js")) - } - } - named("jsTest") { - dependencies { - implementation(kotlin("test-js")) }
--- a/acornui-utils/build.gradle.kts +++ b/acornui-utils/build.gradle.kts @@ -17,3 +17,3 @@ CON plugins { DEL kotlin("multiplatform") ADD id("com.polyforest.acornui.basic") CON `maven-publish` @@ -21,64 +21,7 @@ CON DEL val KOTLIN_LANGUAGE_VERSION: String by extra DEL val KOTLIN_JVM_TARGET: String by extra CON kotlin { DEL js { DEL compilations.all { DEL kotlinOptions { DEL moduleKind = "amd" DEL sourceMap = true DEL sourceMapEmbedSources = "always" DEL main = "noCall" DEL } DEL } DEL } DEL jvm { DEL compilations.all { DEL kotlinOptions { DEL jvmTarget = KOTLIN_JVM_TARGET DEL } DEL } DEL } DEL targets.all { DEL compilations.all { DEL kotlinOptions { DEL languageVersion = KOTLIN_LANGUAGE_VERSION DEL apiVersion = KOTLIN_LANGUAGE_VERSION DEL verbose = true DEL } DEL } DEL } DEL CON sourceSets { DEL commonMain { DEL dependencies { DEL implementation(kotlin("stdlib-common")) DEL } DEL } CON commonTest { CON dependencies { DEL implementation(kotlin("test-common")) DEL implementation(kotlin("test-annotations-common")) CON implementation(project(":acornui-test-utils")) DEL } DEL } DEL named("jvmMain") { DEL dependencies { DEL implementation(kotlin("stdlib-jdk8")) DEL } DEL } DEL named("jvmTest") { DEL dependencies { DEL implementation(kotlin("test")) DEL implementation(kotlin("test-junit")) DEL } DEL } DEL named("jsMain") { DEL dependencies { DEL implementation(kotlin("stdlib-js")) DEL } DEL } DEL named("jsTest") { DEL dependencies { DEL implementation(kotlin("test-js")) CON }
<<<<<<< SEARCH plugins { kotlin("multiplatform") `maven-publish` } val KOTLIN_LANGUAGE_VERSION: String by extra val KOTLIN_JVM_TARGET: String by extra kotlin { js { compilations.all { kotlinOptions { moduleKind = "amd" sourceMap = true sourceMapEmbedSources = "always" main = "noCall" } } } jvm { compilations.all { kotlinOptions { jvmTarget = KOTLIN_JVM_TARGET } } } targets.all { compilations.all { kotlinOptions { languageVersion = KOTLIN_LANGUAGE_VERSION apiVersion = KOTLIN_LANGUAGE_VERSION verbose = true } } } sourceSets { commonMain { dependencies { implementation(kotlin("stdlib-common")) } } commonTest { dependencies { implementation(kotlin("test-common")) implementation(kotlin("test-annotations-common")) implementation(project(":acornui-test-utils")) } } named("jvmMain") { dependencies { implementation(kotlin("stdlib-jdk8")) } } named("jvmTest") { dependencies { implementation(kotlin("test")) implementation(kotlin("test-junit")) } } named("jsMain") { dependencies { implementation(kotlin("stdlib-js")) } } named("jsTest") { dependencies { implementation(kotlin("test-js")) } } ======= plugins { id("com.polyforest.acornui.basic") `maven-publish` } kotlin { sourceSets { commonTest { dependencies { implementation(project(":acornui-test-utils")) } } >>>>>>> REPLACE
tree-sitter/py-tree-sitter
676a1d3c74e526bd8cc67e97d89db2da7e207637
setup.py
python
mit
Remove an incorrect documentation URL. Fixes #9.
""" Py-Tree-sitter """ import platform from setuptools import setup, Extension setup( name = "tree_sitter", version = "0.0.8", maintainer = "Max Brunsfeld", maintainer_email = "[email protected]", author = "Max Brunsfeld", author_email = "[email protected]", url = "https://github.com/tree-sitter/py-tree-sitter", license = "MIT", platforms = ["any"], python_requires = ">=3.3", description = "Python bindings to the Tree-sitter parsing library", classifiers = [ "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Compilers", "Topic :: Text Processing :: Linguistic", ], packages = ['tree_sitter'], ext_modules = [ Extension( "tree_sitter_binding", [ "tree_sitter/core/lib/src/lib.c", "tree_sitter/binding.c", ], include_dirs = [ "tree_sitter/core/lib/include", "tree_sitter/core/lib/utf8proc", ], extra_compile_args = ( ['-std=c99'] if platform.system() != 'Windows' else None ) ) ], project_urls = { 'Source': 'https://github.com/tree-sitter/py-tree-sitter', 'Documentation': 'http://initd.org/psycopg/docs/', } )
""" Py-Tree-sitter """ import platform from setuptools import setup, Extension setup( name = "tree_sitter", version = "0.0.8", maintainer = "Max Brunsfeld", maintainer_email = "[email protected]", author = "Max Brunsfeld", author_email = "[email protected]", url = "https://github.com/tree-sitter/py-tree-sitter", license = "MIT", platforms = ["any"], python_requires = ">=3.3", description = "Python bindings to the Tree-sitter parsing library", classifiers = [ "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Compilers", "Topic :: Text Processing :: Linguistic", ], packages = ['tree_sitter'], ext_modules = [ Extension( "tree_sitter_binding", [ "tree_sitter/core/lib/src/lib.c", "tree_sitter/binding.c", ], include_dirs = [ "tree_sitter/core/lib/include", "tree_sitter/core/lib/utf8proc", ], extra_compile_args = ( ['-std=c99'] if platform.system() != 'Windows' else None ) ) ], project_urls = { 'Source': 'https://github.com/tree-sitter/py-tree-sitter', } )
0
1
1
del_only
--- a/setup.py +++ b/setup.py @@ -45,3 +45,2 @@ 'Source': 'https://github.com/tree-sitter/py-tree-sitter', - 'Documentation': 'http://initd.org/psycopg/docs/', }
--- a/setup.py +++ b/setup.py @@ ... @@ 'Source': 'https://github.com/tree-sitter/py-tree-sitter', - 'Documentation': 'http://initd.org/psycopg/docs/', }
--- a/setup.py +++ b/setup.py @@ -45,3 +45,2 @@ CON 'Source': 'https://github.com/tree-sitter/py-tree-sitter', DEL 'Documentation': 'http://initd.org/psycopg/docs/', CON }
<<<<<<< SEARCH project_urls = { 'Source': 'https://github.com/tree-sitter/py-tree-sitter', 'Documentation': 'http://initd.org/psycopg/docs/', } ) ======= project_urls = { 'Source': 'https://github.com/tree-sitter/py-tree-sitter', } ) >>>>>>> REPLACE
gkoz/gir
8d2ed87fc4784cf32e84513c70f686cedf92adb7
src/analysis/special_functions.rs
rust
mit
Mark some special functions as hidden instead of deleting them
use std::collections::BTreeMap; use std::str::FromStr; use analysis::functions::Info as FuncInfo; #[derive(Eq, Debug, Ord, PartialEq, PartialOrd)] pub enum Type { Copy, Free, Ref, Unref, } impl FromStr for Type { type Err = (); fn from_str(s: &str) -> Result<Type, ()> { use self::Type::*; match s { "copy" => Ok(Copy), "free" => Ok(Free), "ref" => Ok(Ref), "unref" => Ok(Unref), _ => Err(()), } } } pub type Infos = BTreeMap<Type, String>; //Type => glib_name pub fn extract(functions: &mut Vec<FuncInfo>) -> Infos { let mut specials = BTreeMap::new(); functions.retain(|func| { match Type::from_str(&*func.name) { Ok(type_) => { specials.insert(type_, func.glib_name.clone()); false } Err(_) => true, } }); specials }
use std::collections::BTreeMap; use std::str::FromStr; use analysis::functions::Info as FuncInfo; use analysis::functions::Visibility; #[derive(Clone, Copy, Eq, Debug, Ord, PartialEq, PartialOrd)] pub enum Type { Copy, Free, Ref, Unref, } impl FromStr for Type { type Err = (); fn from_str(s: &str) -> Result<Type, ()> { use self::Type::*; match s { "copy" => Ok(Copy), "free" => Ok(Free), "ref" => Ok(Ref), "unref" => Ok(Unref), _ => Err(()), } } } pub type Infos = BTreeMap<Type, String>; //Type => glib_name pub fn extract(functions: &mut Vec<FuncInfo>) -> Infos { let mut specials = BTreeMap::new(); for func in functions.iter_mut() { if let Ok(type_) = Type::from_str(&func.name) { func.visibility = visibility(type_); specials.insert(type_, func.glib_name.clone()); } } specials } fn visibility(t: Type) -> Visibility { use self::Type::*; match t { Copy | Free | Ref | Unref => Visibility::Hidden, } }
17
9
3
mixed
--- a/src/analysis/special_functions.rs +++ b/src/analysis/special_functions.rs @@ -4,4 +4,5 @@ use analysis::functions::Info as FuncInfo; +use analysis::functions::Visibility; -#[derive(Eq, Debug, Ord, PartialEq, PartialOrd)] +#[derive(Clone, Copy, Eq, Debug, Ord, PartialEq, PartialOrd)] pub enum Type { @@ -33,11 +34,8 @@ - functions.retain(|func| { - match Type::from_str(&*func.name) { - Ok(type_) => { - specials.insert(type_, func.glib_name.clone()); - false - } - Err(_) => true, + for func in functions.iter_mut() { + if let Ok(type_) = Type::from_str(&func.name) { + func.visibility = visibility(type_); + specials.insert(type_, func.glib_name.clone()); } - }); + } @@ -45 +43,11 @@ } + +fn visibility(t: Type) -> Visibility { + use self::Type::*; + match t { + Copy | + Free | + Ref | + Unref => Visibility::Hidden, + } +}
--- a/src/analysis/special_functions.rs +++ b/src/analysis/special_functions.rs @@ ... @@ use analysis::functions::Info as FuncInfo; +use analysis::functions::Visibility; -#[derive(Eq, Debug, Ord, PartialEq, PartialOrd)] +#[derive(Clone, Copy, Eq, Debug, Ord, PartialEq, PartialOrd)] pub enum Type { @@ ... @@ - functions.retain(|func| { - match Type::from_str(&*func.name) { - Ok(type_) => { - specials.insert(type_, func.glib_name.clone()); - false - } - Err(_) => true, + for func in functions.iter_mut() { + if let Ok(type_) = Type::from_str(&func.name) { + func.visibility = visibility(type_); + specials.insert(type_, func.glib_name.clone()); } - }); + } @@ ... @@ } + +fn visibility(t: Type) -> Visibility { + use self::Type::*; + match t { + Copy | + Free | + Ref | + Unref => Visibility::Hidden, + } +}
--- a/src/analysis/special_functions.rs +++ b/src/analysis/special_functions.rs @@ -4,4 +4,5 @@ CON use analysis::functions::Info as FuncInfo; ADD use analysis::functions::Visibility; CON DEL #[derive(Eq, Debug, Ord, PartialEq, PartialOrd)] ADD #[derive(Clone, Copy, Eq, Debug, Ord, PartialEq, PartialOrd)] CON pub enum Type { @@ -33,11 +34,8 @@ CON DEL functions.retain(|func| { DEL match Type::from_str(&*func.name) { DEL Ok(type_) => { DEL specials.insert(type_, func.glib_name.clone()); DEL false DEL } DEL Err(_) => true, ADD for func in functions.iter_mut() { ADD if let Ok(type_) = Type::from_str(&func.name) { ADD func.visibility = visibility(type_); ADD specials.insert(type_, func.glib_name.clone()); CON } DEL }); ADD } CON @@ -45 +43,11 @@ CON } ADD ADD fn visibility(t: Type) -> Visibility { ADD use self::Type::*; ADD match t { ADD Copy | ADD Free | ADD Ref | ADD Unref => Visibility::Hidden, ADD } ADD }
<<<<<<< SEARCH use analysis::functions::Info as FuncInfo; #[derive(Eq, Debug, Ord, PartialEq, PartialOrd)] pub enum Type { Copy, ======= use analysis::functions::Info as FuncInfo; use analysis::functions::Visibility; #[derive(Clone, Copy, Eq, Debug, Ord, PartialEq, PartialOrd)] pub enum Type { Copy, >>>>>>> REPLACE <<<<<<< SEARCH let mut specials = BTreeMap::new(); functions.retain(|func| { match Type::from_str(&*func.name) { Ok(type_) => { specials.insert(type_, func.glib_name.clone()); false } Err(_) => true, } }); specials } ======= let mut specials = BTreeMap::new(); for func in functions.iter_mut() { if let Ok(type_) = Type::from_str(&func.name) { func.visibility = visibility(type_); specials.insert(type_, func.glib_name.clone()); } } specials } fn visibility(t: Type) -> Visibility { use self::Type::*; match t { Copy | Free | Ref | Unref => Visibility::Hidden, } } >>>>>>> REPLACE
ilg/LambdaMLM
a50aeb81a588f8297f194d793cb8f8cf0e15a411
lambda/list_member.py
python
mit
Convert list member addresses to non-unicode strings when possible.
from __future__ import print_function from enum import IntEnum import yaml MemberFlag = IntEnum('MemberFlag', [ 'digest', 'digest2', 'modPost', 'preapprove', 'noPost', 'diagnostic', 'moderator', 'myopic', 'superadmin', 'admin', 'protected', 'ccErrors', 'reports', 'vacation', 'ackPost', 'echoPost', 'hidden', ]) def member_flag_representer(dumper, data): return dumper.represent_scalar(u'!flag', data.name) yaml.add_representer(MemberFlag, member_flag_representer) def member_flag_constructor(loader, node): value = loader.construct_scalar(node) return MemberFlag[value] yaml.SafeLoader.add_constructor(u'!flag', member_flag_constructor) class ListMember(yaml.YAMLObject): yaml_tag = u'!Member' yaml_loader = yaml.SafeLoader def __init__(self, address, *args, **kwargs): self.address = address self.flags = set(a for a in args if isinstance(a, MemberFlag)) def __repr__(self): return u'{}({}, flags: {})'.format( self.__class__.__name__, self.address, ', '.join( map(lambda f: f.name, self.flags) ), )
from __future__ import print_function from enum import IntEnum import yaml MemberFlag = IntEnum('MemberFlag', [ 'digest', 'digest2', 'modPost', 'preapprove', 'noPost', 'diagnostic', 'moderator', 'myopic', 'superadmin', 'admin', 'protected', 'ccErrors', 'reports', 'vacation', 'ackPost', 'echoPost', 'hidden', ]) def member_flag_representer(dumper, data): return dumper.represent_scalar(u'!flag', data.name) yaml.add_representer(MemberFlag, member_flag_representer) def member_flag_constructor(loader, node): value = loader.construct_scalar(node) return MemberFlag[value] yaml.SafeLoader.add_constructor(u'!flag', member_flag_constructor) class ListMember(yaml.YAMLObject): yaml_tag = u'!Member' yaml_loader = yaml.SafeLoader def __init__(self, address, *args, **kwargs): if isinstance(address, unicode): # Attempt to down-convert unicode-string addresses to plain strings try: address = str(address) except UnicodeEncodeError: pass self.address = address self.flags = set(a for a in args if isinstance(a, MemberFlag)) def __repr__(self): return u'{}({}, flags: {})'.format( self.__class__.__name__, self.address, ', '.join( map(lambda f: f.name, self.flags) ), )
6
0
1
add_only
--- a/lambda/list_member.py +++ b/lambda/list_member.py @@ -37,2 +37,8 @@ def __init__(self, address, *args, **kwargs): + if isinstance(address, unicode): + # Attempt to down-convert unicode-string addresses to plain strings + try: + address = str(address) + except UnicodeEncodeError: + pass self.address = address
--- a/lambda/list_member.py +++ b/lambda/list_member.py @@ ... @@ def __init__(self, address, *args, **kwargs): + if isinstance(address, unicode): + # Attempt to down-convert unicode-string addresses to plain strings + try: + address = str(address) + except UnicodeEncodeError: + pass self.address = address
--- a/lambda/list_member.py +++ b/lambda/list_member.py @@ -37,2 +37,8 @@ CON def __init__(self, address, *args, **kwargs): ADD if isinstance(address, unicode): ADD # Attempt to down-convert unicode-string addresses to plain strings ADD try: ADD address = str(address) ADD except UnicodeEncodeError: ADD pass CON self.address = address
<<<<<<< SEARCH yaml_loader = yaml.SafeLoader def __init__(self, address, *args, **kwargs): self.address = address self.flags = set(a for a in args if isinstance(a, MemberFlag)) ======= yaml_loader = yaml.SafeLoader def __init__(self, address, *args, **kwargs): if isinstance(address, unicode): # Attempt to down-convert unicode-string addresses to plain strings try: address = str(address) except UnicodeEncodeError: pass self.address = address self.flags = set(a for a in args if isinstance(a, MemberFlag)) >>>>>>> REPLACE
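The LambdaMLM record above narrows unicode-string addresses down to plain str objects when they are pure ASCII. A tiny standalone version of that check (Python 2 only, since the unicode builtin no longer exists in Python 3; the function name is made up):

def to_plain_str(address):
    """Return a byte string for ASCII-only input, otherwise keep unicode."""
    if isinstance(address, unicode):
        try:
            return str(address)        # succeeds only for ASCII-only text
        except UnicodeEncodeError:
            return address             # non-ASCII stays a unicode object
    return address


print(repr(to_plain_str(u'user@example.com')))     # 'user@example.com'
print(repr(to_plain_str(u'us\xe9r@example.com')))  # u'us\xe9r@example.com'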
aroben/html5ever
7db5926da7cb61ac806d99bd48462f8c98bf744c
test/bin.rs
rust
apache-2.0
Add env vars to control which tests we load. Loading consumes a fair amount of time before the point where the built-in test infrastructure can filter which tests to run.
// Copyright 2014 The HTML5 for Rust Project Developers. See the // COPYRIGHT file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![crate_name="html5-external-test"] #![crate_type="bin"] #![feature(macro_rules)] extern crate test; extern crate serialize; extern crate debug; extern crate html5; use std::os; use std::from_str::FromStr; use test::test_main; mod tokenizer; mod tree_builder; mod util; fn main() { let src_dir: Path = FromStr::from_str( os::getenv("HTML5_SRC_DIR").expect("HTML5_SRC_DIR not set").as_slice() ).expect("HTML5_SRC_DIR invalid"); let mut tests = vec!(); tests.push_all_move(tokenizer::tests(src_dir.clone())); tests.push_all_move(tree_builder::tests(src_dir)); let args: Vec<String> = os::args().move_iter().collect(); test_main(args.as_slice(), tests); }
// Copyright 2014 The HTML5 for Rust Project Developers. See the // COPYRIGHT file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![crate_name="html5-external-test"] #![crate_type="bin"] #![feature(macro_rules)] extern crate test; extern crate serialize; extern crate debug; extern crate html5; use std::os; use std::from_str::FromStr; use test::test_main; mod tokenizer; mod tree_builder; mod util; fn main() { let src_dir: Path = FromStr::from_str( os::getenv("HTML5_SRC_DIR").expect("HTML5_SRC_DIR not set").as_slice() ).expect("HTML5_SRC_DIR invalid"); let mut tests = vec!(); if os::getenv("HTML5_NO_TOK_TEST").is_none() { tests.push_all_move(tokenizer::tests(src_dir.clone())); } if os::getenv("HTM5_NO_TB_TEST").is_none() { tests.push_all_move(tree_builder::tests(src_dir)); } let args: Vec<String> = os::args().move_iter().collect(); test_main(args.as_slice(), tests); }
7
2
1
mixed
--- a/test/bin.rs +++ b/test/bin.rs @@ -35,4 +35,9 @@ - tests.push_all_move(tokenizer::tests(src_dir.clone())); - tests.push_all_move(tree_builder::tests(src_dir)); + if os::getenv("HTML5_NO_TOK_TEST").is_none() { + tests.push_all_move(tokenizer::tests(src_dir.clone())); + } + + if os::getenv("HTM5_NO_TB_TEST").is_none() { + tests.push_all_move(tree_builder::tests(src_dir)); + }
--- a/test/bin.rs +++ b/test/bin.rs @@ ... @@ - tests.push_all_move(tokenizer::tests(src_dir.clone())); - tests.push_all_move(tree_builder::tests(src_dir)); + if os::getenv("HTML5_NO_TOK_TEST").is_none() { + tests.push_all_move(tokenizer::tests(src_dir.clone())); + } + + if os::getenv("HTM5_NO_TB_TEST").is_none() { + tests.push_all_move(tree_builder::tests(src_dir)); + }
--- a/test/bin.rs +++ b/test/bin.rs @@ -35,4 +35,9 @@ CON DEL tests.push_all_move(tokenizer::tests(src_dir.clone())); DEL tests.push_all_move(tree_builder::tests(src_dir)); ADD if os::getenv("HTML5_NO_TOK_TEST").is_none() { ADD tests.push_all_move(tokenizer::tests(src_dir.clone())); ADD } ADD ADD if os::getenv("HTM5_NO_TB_TEST").is_none() { ADD tests.push_all_move(tree_builder::tests(src_dir)); ADD } CON
<<<<<<< SEARCH let mut tests = vec!(); tests.push_all_move(tokenizer::tests(src_dir.clone())); tests.push_all_move(tree_builder::tests(src_dir)); let args: Vec<String> = os::args().move_iter().collect(); ======= let mut tests = vec!(); if os::getenv("HTML5_NO_TOK_TEST").is_none() { tests.push_all_move(tokenizer::tests(src_dir.clone())); } if os::getenv("HTM5_NO_TB_TEST").is_none() { tests.push_all_move(tree_builder::tests(src_dir)); } let args: Vec<String> = os::args().move_iter().collect(); >>>>>>> REPLACE
iankronquist/rustyvisor
22ef0369784bab0727b9fc09027aef27dd7eb93d
src/cpu.rs
rust
mit
Implement Default::default & new for PerCoreVariable
use vmx; use collections::vec::Vec; use core::sync::atomic::{ATOMIC_U16_INIT, AtomicU16, Ordering}; use spin; static CPU_COUNT: spin::Once<u16> = spin::Once::new(); static CPU_ASSIGNMENT: AtomicU16 = ATOMIC_U16_INIT; pub fn init(count: u16) { CPU_COUNT.call_once(||{ count }); } pub fn bring_core_online() { let cpu_num = CPU_ASSIGNMENT.fetch_add(1, Ordering::Relaxed); set_number(cpu_num); } fn set_number(num: u16) { vmx::write_es(num); } pub fn get_number() -> u16 { vmx::read_es() } #[derive(Default)] pub struct PerCoreVariable<T> { vars: Vec<T>, } impl<T> PerCoreVariable<T> { pub fn get(&self) -> &T { &self.vars[get_number() as usize] } }
use vmx; use collections::vec::Vec; use core::sync::atomic::{ATOMIC_U16_INIT, AtomicU16, Ordering}; use spin; static CPU_COUNT: spin::Once<u16> = spin::Once::new(); static CPU_ASSIGNMENT: AtomicU16 = ATOMIC_U16_INIT; pub fn init(count: u16) { CPU_COUNT.call_once(||{ count }); } fn get_cpu_count() -> u16 { *CPU_COUNT.call_once(|| { panic!("Must initialize CPU count before requesting it"); }) } pub fn bring_core_online() { let cpu_num = CPU_ASSIGNMENT.fetch_add(1, Ordering::Relaxed); set_number(cpu_num); } fn set_number(num: u16) { vmx::write_es(num); } pub fn get_number() -> u16 { vmx::read_es() } pub struct PerCoreVariable<T> { vars: Vec<T>, } impl<T: Default> Default for PerCoreVariable<T> { fn default() -> Self { let mut vars = vec![]; for _ in 0..get_cpu_count() { vars.push(Default::default()); } PerCoreVariable { vars: vars } } } impl<T> PerCoreVariable<T> { pub fn get(&self) -> &T { &self.vars[get_number() as usize] } } impl<T: Clone> PerCoreVariable<T> { pub fn new(item: T) -> Self { PerCoreVariable { vars: vec![item; get_cpu_count() as usize] } } }
24
1
3
mixed
--- a/src/cpu.rs +++ b/src/cpu.rs @@ -12,2 +12,8 @@ CPU_COUNT.call_once(||{ count }); +} + +fn get_cpu_count() -> u16 { + *CPU_COUNT.call_once(|| { + panic!("Must initialize CPU count before requesting it"); + }) } @@ -31,5 +37,15 @@ -#[derive(Default)] pub struct PerCoreVariable<T> { vars: Vec<T>, +} + + +impl<T: Default> Default for PerCoreVariable<T> { + fn default() -> Self { + let mut vars = vec![]; + for _ in 0..get_cpu_count() { + vars.push(Default::default()); + } + PerCoreVariable { vars: vars } + } } @@ -42 +58,8 @@ } + + +impl<T: Clone> PerCoreVariable<T> { + pub fn new(item: T) -> Self { + PerCoreVariable { vars: vec![item; get_cpu_count() as usize] } + } +}
--- a/src/cpu.rs +++ b/src/cpu.rs @@ ... @@ CPU_COUNT.call_once(||{ count }); +} + +fn get_cpu_count() -> u16 { + *CPU_COUNT.call_once(|| { + panic!("Must initialize CPU count before requesting it"); + }) } @@ ... @@ -#[derive(Default)] pub struct PerCoreVariable<T> { vars: Vec<T>, +} + + +impl<T: Default> Default for PerCoreVariable<T> { + fn default() -> Self { + let mut vars = vec![]; + for _ in 0..get_cpu_count() { + vars.push(Default::default()); + } + PerCoreVariable { vars: vars } + } } @@ ... @@ } + + +impl<T: Clone> PerCoreVariable<T> { + pub fn new(item: T) -> Self { + PerCoreVariable { vars: vec![item; get_cpu_count() as usize] } + } +}
--- a/src/cpu.rs +++ b/src/cpu.rs @@ -12,2 +12,8 @@ CON CPU_COUNT.call_once(||{ count }); ADD } ADD ADD fn get_cpu_count() -> u16 { ADD *CPU_COUNT.call_once(|| { ADD panic!("Must initialize CPU count before requesting it"); ADD }) CON } @@ -31,5 +37,15 @@ CON DEL #[derive(Default)] CON pub struct PerCoreVariable<T> { CON vars: Vec<T>, ADD } ADD ADD ADD impl<T: Default> Default for PerCoreVariable<T> { ADD fn default() -> Self { ADD let mut vars = vec![]; ADD for _ in 0..get_cpu_count() { ADD vars.push(Default::default()); ADD } ADD PerCoreVariable { vars: vars } ADD } CON } @@ -42 +58,8 @@ CON } ADD ADD ADD impl<T: Clone> PerCoreVariable<T> { ADD pub fn new(item: T) -> Self { ADD PerCoreVariable { vars: vec![item; get_cpu_count() as usize] } ADD } ADD }
<<<<<<< SEARCH pub fn init(count: u16) { CPU_COUNT.call_once(||{ count }); } ======= pub fn init(count: u16) { CPU_COUNT.call_once(||{ count }); } fn get_cpu_count() -> u16 { *CPU_COUNT.call_once(|| { panic!("Must initialize CPU count before requesting it"); }) } >>>>>>> REPLACE <<<<<<< SEARCH #[derive(Default)] pub struct PerCoreVariable<T> { vars: Vec<T>, } ======= pub struct PerCoreVariable<T> { vars: Vec<T>, } impl<T: Default> Default for PerCoreVariable<T> { fn default() -> Self { let mut vars = vec![]; for _ in 0..get_cpu_count() { vars.push(Default::default()); } PerCoreVariable { vars: vars } } } >>>>>>> REPLACE <<<<<<< SEARCH } } ======= } } impl<T: Clone> PerCoreVariable<T> { pub fn new(item: T) -> Self { PerCoreVariable { vars: vec![item; get_cpu_count() as usize] } } } >>>>>>> REPLACE
geneanet/customCameraAngular
f8ecdcdee9fe2d873bd2520293ac4a10ec64d5d7
test/spec/GeneanetCustomCamera.js
javascript
bsd-3-clause
Complete the test to configure the service.
'use strict'; describe('geneanetCustomCamera', function() { beforeEach(module('geneanetCustomCamera')); beforeEach(module(function(geneanetCustomCameraProvider) { return function() { geneanetCustomCameraProvider.setOptionMiniature(false); }; })); it('should have the miniature option disabled.', function() { inject(function(geneanetCustomCamera) { expect(geneanetCustomCamera.getOptionMiniature()).not.toBeTruthy(); }); }); });
'use strict'; describe('geneanetCustomCamera', function() { beforeEach(module('geneanetCustomCamera')); it('should have options values as values set by the provider', function() { var optionMiniature = false; var optionSaveInGallery = true; var optionCameraBackgroundColor = '#012345'; var optionCameraBackgroundColorPressed = '#678901'; var optionQuality = '12'; var optionOpacity = false; var optionSwitchFlash = true; var optionSwitchCamera = false; // init service module(function(geneanetCustomCameraProvider) { return function() { geneanetCustomCameraProvider.setOptionMiniature(optionMiniature); geneanetCustomCameraProvider.setOptionSaveInGallery(optionSaveInGallery); geneanetCustomCameraProvider.setOptionCameraBackgroundColor(optionCameraBackgroundColor); geneanetCustomCameraProvider.setOptionCameraBackgroundColorPressed(optionCameraBackgroundColorPressed); geneanetCustomCameraProvider.setOptionQuality(optionQuality); geneanetCustomCameraProvider.setOptionOpacity(optionOpacity); geneanetCustomCameraProvider.setOptionSwitchFlash(optionSwitchFlash); geneanetCustomCameraProvider.setOptionSwitchCamera(optionSwitchCamera); }; }); inject(function(geneanetCustomCamera) { expect(geneanetCustomCamera.getOptionMiniature()).toEqual(optionMiniature); expect(geneanetCustomCamera.getOptionSaveInGallery()).toEqual(optionSaveInGallery); expect(geneanetCustomCamera.getOptionCameraBackgroundColor()).toEqual(optionCameraBackgroundColor); expect(geneanetCustomCamera.getOptionCameraBackgroundColorPressed()).toEqual(optionCameraBackgroundColorPressed); expect(geneanetCustomCamera.getOptionQuality()).toEqual(optionQuality); expect(geneanetCustomCamera.getOptionOpacity()).toEqual(optionOpacity); expect(geneanetCustomCamera.getOptionSwitchFlash()).toEqual(optionSwitchFlash); expect(geneanetCustomCamera.getOptionSwitchCamera()).toEqual(optionSwitchCamera); }); // @TODO: test set default flash }); });
33
7
1
mixed
--- a/test/spec/GeneanetCustomCamera.js +++ b/test/spec/GeneanetCustomCamera.js @@ -5,12 +5,38 @@ - beforeEach(module(function(geneanetCustomCameraProvider) { - return function() { - geneanetCustomCameraProvider.setOptionMiniature(false); - }; - })); + it('should have options values as values set by the provider', function() { + var optionMiniature = false; + var optionSaveInGallery = true; + var optionCameraBackgroundColor = '#012345'; + var optionCameraBackgroundColorPressed = '#678901'; + var optionQuality = '12'; + var optionOpacity = false; + var optionSwitchFlash = true; + var optionSwitchCamera = false; - it('should have the miniature option disabled.', function() { + // init service + module(function(geneanetCustomCameraProvider) { + return function() { + geneanetCustomCameraProvider.setOptionMiniature(optionMiniature); + geneanetCustomCameraProvider.setOptionSaveInGallery(optionSaveInGallery); + geneanetCustomCameraProvider.setOptionCameraBackgroundColor(optionCameraBackgroundColor); + geneanetCustomCameraProvider.setOptionCameraBackgroundColorPressed(optionCameraBackgroundColorPressed); + geneanetCustomCameraProvider.setOptionQuality(optionQuality); + geneanetCustomCameraProvider.setOptionOpacity(optionOpacity); + geneanetCustomCameraProvider.setOptionSwitchFlash(optionSwitchFlash); + geneanetCustomCameraProvider.setOptionSwitchCamera(optionSwitchCamera); + }; + }); + inject(function(geneanetCustomCamera) { - expect(geneanetCustomCamera.getOptionMiniature()).not.toBeTruthy(); + expect(geneanetCustomCamera.getOptionMiniature()).toEqual(optionMiniature); + expect(geneanetCustomCamera.getOptionSaveInGallery()).toEqual(optionSaveInGallery); + expect(geneanetCustomCamera.getOptionCameraBackgroundColor()).toEqual(optionCameraBackgroundColor); + expect(geneanetCustomCamera.getOptionCameraBackgroundColorPressed()).toEqual(optionCameraBackgroundColorPressed); + expect(geneanetCustomCamera.getOptionQuality()).toEqual(optionQuality); + expect(geneanetCustomCamera.getOptionOpacity()).toEqual(optionOpacity); + expect(geneanetCustomCamera.getOptionSwitchFlash()).toEqual(optionSwitchFlash); + expect(geneanetCustomCamera.getOptionSwitchCamera()).toEqual(optionSwitchCamera); }); + + // @TODO: test set default flash });
--- a/test/spec/GeneanetCustomCamera.js +++ b/test/spec/GeneanetCustomCamera.js @@ ... @@ - beforeEach(module(function(geneanetCustomCameraProvider) { - return function() { - geneanetCustomCameraProvider.setOptionMiniature(false); - }; - })); + it('should have options values as values set by the provider', function() { + var optionMiniature = false; + var optionSaveInGallery = true; + var optionCameraBackgroundColor = '#012345'; + var optionCameraBackgroundColorPressed = '#678901'; + var optionQuality = '12'; + var optionOpacity = false; + var optionSwitchFlash = true; + var optionSwitchCamera = false; - it('should have the miniature option disabled.', function() { + // init service + module(function(geneanetCustomCameraProvider) { + return function() { + geneanetCustomCameraProvider.setOptionMiniature(optionMiniature); + geneanetCustomCameraProvider.setOptionSaveInGallery(optionSaveInGallery); + geneanetCustomCameraProvider.setOptionCameraBackgroundColor(optionCameraBackgroundColor); + geneanetCustomCameraProvider.setOptionCameraBackgroundColorPressed(optionCameraBackgroundColorPressed); + geneanetCustomCameraProvider.setOptionQuality(optionQuality); + geneanetCustomCameraProvider.setOptionOpacity(optionOpacity); + geneanetCustomCameraProvider.setOptionSwitchFlash(optionSwitchFlash); + geneanetCustomCameraProvider.setOptionSwitchCamera(optionSwitchCamera); + }; + }); + inject(function(geneanetCustomCamera) { - expect(geneanetCustomCamera.getOptionMiniature()).not.toBeTruthy(); + expect(geneanetCustomCamera.getOptionMiniature()).toEqual(optionMiniature); + expect(geneanetCustomCamera.getOptionSaveInGallery()).toEqual(optionSaveInGallery); + expect(geneanetCustomCamera.getOptionCameraBackgroundColor()).toEqual(optionCameraBackgroundColor); + expect(geneanetCustomCamera.getOptionCameraBackgroundColorPressed()).toEqual(optionCameraBackgroundColorPressed); + expect(geneanetCustomCamera.getOptionQuality()).toEqual(optionQuality); + expect(geneanetCustomCamera.getOptionOpacity()).toEqual(optionOpacity); + expect(geneanetCustomCamera.getOptionSwitchFlash()).toEqual(optionSwitchFlash); + expect(geneanetCustomCamera.getOptionSwitchCamera()).toEqual(optionSwitchCamera); }); + + // @TODO: test set default flash });
--- a/test/spec/GeneanetCustomCamera.js +++ b/test/spec/GeneanetCustomCamera.js @@ -5,12 +5,38 @@ CON DEL beforeEach(module(function(geneanetCustomCameraProvider) { DEL return function() { DEL geneanetCustomCameraProvider.setOptionMiniature(false); DEL }; DEL })); ADD it('should have options values as values set by the provider', function() { ADD var optionMiniature = false; ADD var optionSaveInGallery = true; ADD var optionCameraBackgroundColor = '#012345'; ADD var optionCameraBackgroundColorPressed = '#678901'; ADD var optionQuality = '12'; ADD var optionOpacity = false; ADD var optionSwitchFlash = true; ADD var optionSwitchCamera = false; CON DEL it('should have the miniature option disabled.', function() { ADD // init service ADD module(function(geneanetCustomCameraProvider) { ADD return function() { ADD geneanetCustomCameraProvider.setOptionMiniature(optionMiniature); ADD geneanetCustomCameraProvider.setOptionSaveInGallery(optionSaveInGallery); ADD geneanetCustomCameraProvider.setOptionCameraBackgroundColor(optionCameraBackgroundColor); ADD geneanetCustomCameraProvider.setOptionCameraBackgroundColorPressed(optionCameraBackgroundColorPressed); ADD geneanetCustomCameraProvider.setOptionQuality(optionQuality); ADD geneanetCustomCameraProvider.setOptionOpacity(optionOpacity); ADD geneanetCustomCameraProvider.setOptionSwitchFlash(optionSwitchFlash); ADD geneanetCustomCameraProvider.setOptionSwitchCamera(optionSwitchCamera); ADD }; ADD }); ADD CON inject(function(geneanetCustomCamera) { DEL expect(geneanetCustomCamera.getOptionMiniature()).not.toBeTruthy(); ADD expect(geneanetCustomCamera.getOptionMiniature()).toEqual(optionMiniature); ADD expect(geneanetCustomCamera.getOptionSaveInGallery()).toEqual(optionSaveInGallery); ADD expect(geneanetCustomCamera.getOptionCameraBackgroundColor()).toEqual(optionCameraBackgroundColor); ADD expect(geneanetCustomCamera.getOptionCameraBackgroundColorPressed()).toEqual(optionCameraBackgroundColorPressed); ADD expect(geneanetCustomCamera.getOptionQuality()).toEqual(optionQuality); ADD expect(geneanetCustomCamera.getOptionOpacity()).toEqual(optionOpacity); ADD expect(geneanetCustomCamera.getOptionSwitchFlash()).toEqual(optionSwitchFlash); ADD expect(geneanetCustomCamera.getOptionSwitchCamera()).toEqual(optionSwitchCamera); CON }); ADD ADD // @TODO: test set default flash CON });
<<<<<<< SEARCH beforeEach(module('geneanetCustomCamera')); beforeEach(module(function(geneanetCustomCameraProvider) { return function() { geneanetCustomCameraProvider.setOptionMiniature(false); }; })); it('should have the miniature option disabled.', function() { inject(function(geneanetCustomCamera) { expect(geneanetCustomCamera.getOptionMiniature()).not.toBeTruthy(); }); }); }); ======= beforeEach(module('geneanetCustomCamera')); it('should have options values as values set by the provider', function() { var optionMiniature = false; var optionSaveInGallery = true; var optionCameraBackgroundColor = '#012345'; var optionCameraBackgroundColorPressed = '#678901'; var optionQuality = '12'; var optionOpacity = false; var optionSwitchFlash = true; var optionSwitchCamera = false; // init service module(function(geneanetCustomCameraProvider) { return function() { geneanetCustomCameraProvider.setOptionMiniature(optionMiniature); geneanetCustomCameraProvider.setOptionSaveInGallery(optionSaveInGallery); geneanetCustomCameraProvider.setOptionCameraBackgroundColor(optionCameraBackgroundColor); geneanetCustomCameraProvider.setOptionCameraBackgroundColorPressed(optionCameraBackgroundColorPressed); geneanetCustomCameraProvider.setOptionQuality(optionQuality); geneanetCustomCameraProvider.setOptionOpacity(optionOpacity); geneanetCustomCameraProvider.setOptionSwitchFlash(optionSwitchFlash); geneanetCustomCameraProvider.setOptionSwitchCamera(optionSwitchCamera); }; }); inject(function(geneanetCustomCamera) { expect(geneanetCustomCamera.getOptionMiniature()).toEqual(optionMiniature); expect(geneanetCustomCamera.getOptionSaveInGallery()).toEqual(optionSaveInGallery); expect(geneanetCustomCamera.getOptionCameraBackgroundColor()).toEqual(optionCameraBackgroundColor); expect(geneanetCustomCamera.getOptionCameraBackgroundColorPressed()).toEqual(optionCameraBackgroundColorPressed); expect(geneanetCustomCamera.getOptionQuality()).toEqual(optionQuality); expect(geneanetCustomCamera.getOptionOpacity()).toEqual(optionOpacity); expect(geneanetCustomCamera.getOptionSwitchFlash()).toEqual(optionSwitchFlash); expect(geneanetCustomCamera.getOptionSwitchCamera()).toEqual(optionSwitchCamera); }); // @TODO: test set default flash }); }); >>>>>>> REPLACE
LouisCAD/Splitties
f26e19b0fadc75ee99eba8e7a97eaf8433629ae4
buildSrc/src/main/kotlin/dependencies/DependenciesRepositories.kt
kotlin
apache-2.0
Add missing group in google repo content
/* * Copyright 2019 Louis Cognault Ayeva Derman. Use of this source code is governed by the Apache 2.0 license. */ @file:Suppress("PackageDirectoryMismatch", "SpellCheckingInspection") import org.gradle.api.artifacts.dsl.RepositoryHandler import org.gradle.kotlin.dsl.maven fun RepositoryHandler.setupForProject() { jcenter() mavenCentral().ensureGroupsStartingWith("com.jakewharton.", "com.squareup.") google().ensureGroups( "com.google.gms", "com.google.firebase", "io.fabric.sdk.android", "com.crashlytics.sdk.android", "org.chromium.net", "com.android" ).ensureGroupsStartingWith( "androidx.", "com.android.", "com.google.android.", "com.google.ar", "android.arch" ) maven(url = "https://maven.fabric.io/public").ensureGroups("io.fabric.tools") maven( url = "https://dl.bintray.com/louiscad/splitties-dev" ).ensureGroups("com.louiscad.splitties") maven( url = "https://dl.bintray.com/kotlin/kotlin-eap" ).ensureGroups("org.jetbrains.kotlin") maven( url = "https://oss.sonatype.org/content/repositories/snapshots" ).ensureGroups("org.androidannotations") }
/* * Copyright 2019 Louis Cognault Ayeva Derman. Use of this source code is governed by the Apache 2.0 license. */ @file:Suppress("PackageDirectoryMismatch", "SpellCheckingInspection") import org.gradle.api.artifacts.dsl.RepositoryHandler import org.gradle.kotlin.dsl.maven fun RepositoryHandler.setupForProject() { jcenter() mavenCentral().ensureGroupsStartingWith("com.jakewharton.", "com.squareup.") google().ensureGroups( "com.google.gms", "com.google.firebase", "com.google.test.platform", "io.fabric.sdk.android", "com.crashlytics.sdk.android", "org.chromium.net", "com.android" ).ensureGroupsStartingWith( "androidx.", "com.android.", "com.google.android.", "com.google.ar", "android.arch" ) maven(url = "https://maven.fabric.io/public").ensureGroups("io.fabric.tools") maven( url = "https://dl.bintray.com/louiscad/splitties-dev" ).ensureGroups("com.louiscad.splitties") maven( url = "https://dl.bintray.com/kotlin/kotlin-eap" ).ensureGroups("org.jetbrains.kotlin") maven( url = "https://oss.sonatype.org/content/repositories/snapshots" ).ensureGroups("org.androidannotations") }
1
0
1
add_only
--- a/buildSrc/src/main/kotlin/dependencies/DependenciesRepositories.kt +++ b/buildSrc/src/main/kotlin/dependencies/DependenciesRepositories.kt @@ -17,2 +17,3 @@ "com.google.firebase", + "com.google.test.platform", "io.fabric.sdk.android",
--- a/buildSrc/src/main/kotlin/dependencies/DependenciesRepositories.kt +++ b/buildSrc/src/main/kotlin/dependencies/DependenciesRepositories.kt @@ ... @@ "com.google.firebase", + "com.google.test.platform", "io.fabric.sdk.android",
--- a/buildSrc/src/main/kotlin/dependencies/DependenciesRepositories.kt +++ b/buildSrc/src/main/kotlin/dependencies/DependenciesRepositories.kt @@ -17,2 +17,3 @@ CON "com.google.firebase", ADD "com.google.test.platform", CON "io.fabric.sdk.android",
<<<<<<< SEARCH "com.google.gms", "com.google.firebase", "io.fabric.sdk.android", "com.crashlytics.sdk.android", ======= "com.google.gms", "com.google.firebase", "com.google.test.platform", "io.fabric.sdk.android", "com.crashlytics.sdk.android", >>>>>>> REPLACE
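The ensureGroups and ensureGroupsStartingWith calls in this record are Splitties-specific build helpers whose implementation is not shown here. A minimal sketch of the same group-restriction idea using Gradle's built-in repository content filtering, assuming it sits inside the repositories block of a Kotlin DSL build script and that the project's helper wraps something similar (an assumption, not taken from the commit):

// Sketch only: limit the google() repository to known groups, including the
// com.google.test.platform group that this commit adds.
repositories {
    google {
        content {
            includeGroup("com.google.gms")
            includeGroup("com.google.firebase")
            includeGroup("com.google.test.platform")
            includeGroupByRegex("androidx\\..*")
            includeGroupByRegex("com\\.google\\.android\\..*")
        }
    }
}

Restricting a repository to the groups it can actually serve keeps dependency resolution from querying it needlessly, which is presumably the motivation behind the helper used in the commit.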
jmtuley/adventofcode2015
ff9539503615362d6cddf0b13cafc1672adfb6a1
day2/solution/src/main.rs
rust
mit
Implement naïve solution (TODO: replace with better use of iterators)
use std::io; use std::io::prelude::*; fn main() { let stdin = io::stdin(); for line in stdin.lock().lines() { println!("{}", line.unwrap()); } }
use std::io; use std::io::prelude::*; fn main() { let stdin = io::stdin(); let mut total = 0; for line in stdin.lock().lines() { let l = line.unwrap(); let split: Vec<i32> = l.split('x').map(|s| s.parse::<i32>().unwrap()).collect(); total += sqfootage(split[0], split[1], split[2]); } println!("Total is {} square feet.", total) } fn sqfootage(x:i32, y:i32, z:i32) -> i32 { let side1 = x * y; let side2 = y * z; let side3 = z * x; let slack = vmin(vec![side1, side2, side3]); return (2 * side1) + (2 * side2) + (2 * side3) + slack; } fn vmin(v:Vec<i32>) -> i32 { let mut m = v[0]; for e in v { if e < m { m = e; } } m } #[test] fn sqfootage_computes_area_with_slack() { assert_eq!(58, sqfootage(2, 3, 4)); assert_eq!(43, sqfootage(1, 1, 10)); } #[test] fn min_works() { assert_eq!(1, vmin(vec![1])); assert_eq!(1, vmin(vec![2, 1])); assert_eq!(1, vmin(vec![1, 2, 3])); assert_eq!(1, vmin(vec![2, 1, 3])); }
39
1
1
mixed
--- a/day2/solution/src/main.rs +++ b/day2/solution/src/main.rs @@ -5,5 +5,43 @@ let stdin = io::stdin(); + let mut total = 0; for line in stdin.lock().lines() { - println!("{}", line.unwrap()); + let l = line.unwrap(); + let split: Vec<i32> = l.split('x').map(|s| s.parse::<i32>().unwrap()).collect(); + total += sqfootage(split[0], split[1], split[2]); } + + println!("Total is {} square feet.", total) } + +fn sqfootage(x:i32, y:i32, z:i32) -> i32 { + let side1 = x * y; + let side2 = y * z; + let side3 = z * x; + let slack = vmin(vec![side1, side2, side3]); + return (2 * side1) + (2 * side2) + (2 * side3) + slack; +} + +fn vmin(v:Vec<i32>) -> i32 { + let mut m = v[0]; + for e in v { + if e < m { + m = e; + } + } + + m +} + +#[test] +fn sqfootage_computes_area_with_slack() { + assert_eq!(58, sqfootage(2, 3, 4)); + assert_eq!(43, sqfootage(1, 1, 10)); +} + +#[test] +fn min_works() { + assert_eq!(1, vmin(vec![1])); + assert_eq!(1, vmin(vec![2, 1])); + assert_eq!(1, vmin(vec![1, 2, 3])); + assert_eq!(1, vmin(vec![2, 1, 3])); +}
--- a/day2/solution/src/main.rs +++ b/day2/solution/src/main.rs @@ ... @@ let stdin = io::stdin(); + let mut total = 0; for line in stdin.lock().lines() { - println!("{}", line.unwrap()); + let l = line.unwrap(); + let split: Vec<i32> = l.split('x').map(|s| s.parse::<i32>().unwrap()).collect(); + total += sqfootage(split[0], split[1], split[2]); } + + println!("Total is {} square feet.", total) } + +fn sqfootage(x:i32, y:i32, z:i32) -> i32 { + let side1 = x * y; + let side2 = y * z; + let side3 = z * x; + let slack = vmin(vec![side1, side2, side3]); + return (2 * side1) + (2 * side2) + (2 * side3) + slack; +} + +fn vmin(v:Vec<i32>) -> i32 { + let mut m = v[0]; + for e in v { + if e < m { + m = e; + } + } + + m +} + +#[test] +fn sqfootage_computes_area_with_slack() { + assert_eq!(58, sqfootage(2, 3, 4)); + assert_eq!(43, sqfootage(1, 1, 10)); +} + +#[test] +fn min_works() { + assert_eq!(1, vmin(vec![1])); + assert_eq!(1, vmin(vec![2, 1])); + assert_eq!(1, vmin(vec![1, 2, 3])); + assert_eq!(1, vmin(vec![2, 1, 3])); +}
--- a/day2/solution/src/main.rs +++ b/day2/solution/src/main.rs @@ -5,5 +5,43 @@ CON let stdin = io::stdin(); ADD let mut total = 0; CON for line in stdin.lock().lines() { DEL println!("{}", line.unwrap()); ADD let l = line.unwrap(); ADD let split: Vec<i32> = l.split('x').map(|s| s.parse::<i32>().unwrap()).collect(); ADD total += sqfootage(split[0], split[1], split[2]); CON } ADD ADD println!("Total is {} square feet.", total) CON } ADD ADD fn sqfootage(x:i32, y:i32, z:i32) -> i32 { ADD let side1 = x * y; ADD let side2 = y * z; ADD let side3 = z * x; ADD let slack = vmin(vec![side1, side2, side3]); ADD return (2 * side1) + (2 * side2) + (2 * side3) + slack; ADD } ADD ADD fn vmin(v:Vec<i32>) -> i32 { ADD let mut m = v[0]; ADD for e in v { ADD if e < m { ADD m = e; ADD } ADD } ADD ADD m ADD } ADD ADD #[test] ADD fn sqfootage_computes_area_with_slack() { ADD assert_eq!(58, sqfootage(2, 3, 4)); ADD assert_eq!(43, sqfootage(1, 1, 10)); ADD } ADD ADD #[test] ADD fn min_works() { ADD assert_eq!(1, vmin(vec![1])); ADD assert_eq!(1, vmin(vec![2, 1])); ADD assert_eq!(1, vmin(vec![1, 2, 3])); ADD assert_eq!(1, vmin(vec![2, 1, 3])); ADD }
<<<<<<< SEARCH fn main() { let stdin = io::stdin(); for line in stdin.lock().lines() { println!("{}", line.unwrap()); } } ======= fn main() { let stdin = io::stdin(); let mut total = 0; for line in stdin.lock().lines() { let l = line.unwrap(); let split: Vec<i32> = l.split('x').map(|s| s.parse::<i32>().unwrap()).collect(); total += sqfootage(split[0], split[1], split[2]); } println!("Total is {} square feet.", total) } fn sqfootage(x:i32, y:i32, z:i32) -> i32 { let side1 = x * y; let side2 = y * z; let side3 = z * x; let slack = vmin(vec![side1, side2, side3]); return (2 * side1) + (2 * side2) + (2 * side3) + slack; } fn vmin(v:Vec<i32>) -> i32 { let mut m = v[0]; for e in v { if e < m { m = e; } } m } #[test] fn sqfootage_computes_area_with_slack() { assert_eq!(58, sqfootage(2, 3, 4)); assert_eq!(43, sqfootage(1, 1, 10)); } #[test] fn min_works() { assert_eq!(1, vmin(vec![1])); assert_eq!(1, vmin(vec![2, 1])); assert_eq!(1, vmin(vec![1, 2, 3])); assert_eq!(1, vmin(vec![2, 1, 3])); } >>>>>>> REPLACE
LouisCAD/Splitties
146b94ad21ce1b4af6d401d0bc1ca07225f6763b
modules/coroutines/src/commonMain/kotlin/splitties/coroutines/ScopeLoops.kt
kotlin
apache-2.0
Add KDoc for repeatWhileActive overload
/* * Copyright 2019 Louis Cognault Ayeva Derman. Use of this source code is governed by the Apache 2.0 license. */ package splitties.coroutines import kotlinx.coroutines.CancellationException import kotlinx.coroutines.ensureActive import splitties.experimental.ExperimentalSplittiesApi import kotlin.coroutines.coroutineContext /** * As of Kotlin 1.3, `while (true)` evaluates to [Unit] instead of [Nothing] in lambdas, and using * `coroutineContext.ensureActive()` would add another line of boilerplate, so this inline extension * function can be handy. The fact that is is inline allows you to do a non local return just like * you would from a while loop. */ suspend inline fun repeatWhileActive(block: () -> Unit): Nothing { while (true) { coroutineContext.ensureActive() block() } } @ExperimentalSplittiesApi suspend inline fun repeatWhileActive( ignoreInnerCancellations: Boolean, block: () -> Unit ): Nothing { if (ignoreInnerCancellations) while (true) { coroutineContext.ensureActive() // Outer cancellations are caught here try { block() } catch (ignored: CancellationException) { } } else repeatWhileActive(block) }
/* * Copyright 2019 Louis Cognault Ayeva Derman. Use of this source code is governed by the Apache 2.0 license. */ package splitties.coroutines import kotlinx.coroutines.CancellationException import kotlinx.coroutines.ensureActive import splitties.experimental.ExperimentalSplittiesApi import kotlin.coroutines.coroutineContext /** * As of Kotlin 1.3, `while (true)` evaluates to [Unit] instead of [Nothing] in lambdas, and using * `coroutineContext.ensureActive()` would add another line of boilerplate, so this inline extension * function can be handy. The fact that is is inline allows you to do a non local return just like * you would from a while loop. */ suspend inline fun repeatWhileActive(block: () -> Unit): Nothing { while (true) { coroutineContext.ensureActive() block() } } /** * As of Kotlin 1.3, `while (true)` evaluates to [Unit] instead of [Nothing] in lambdas, and using * `coroutineContext.ensureActive()` would add another line of boilerplate, so this inline extension * function can be handy. The fact that is is inline allows you to do a non local return just like * you would from a while loop. * * If [ignoreInnerCancellations] is `true`, [CancellationException]s thrown from the [block] will be * caught and ignored. Next iteration will still check for cancellation, so it will exit safely by * throwing it if the entire scope is cancelled. This gives a chance to recover from local * cancellations in an iteration. */ @ExperimentalSplittiesApi suspend inline fun repeatWhileActive( ignoreInnerCancellations: Boolean, block: () -> Unit ): Nothing { if (ignoreInnerCancellations) while (true) { coroutineContext.ensureActive() // Outer cancellations are caught here try { block() } catch (ignored: CancellationException) { } } else repeatWhileActive(block) }
11
0
1
add_only
--- a/modules/coroutines/src/commonMain/kotlin/splitties/coroutines/ScopeLoops.kt +++ b/modules/coroutines/src/commonMain/kotlin/splitties/coroutines/ScopeLoops.kt @@ -24,2 +24,13 @@ +/** + * As of Kotlin 1.3, `while (true)` evaluates to [Unit] instead of [Nothing] in lambdas, and using + * `coroutineContext.ensureActive()` would add another line of boilerplate, so this inline extension + * function can be handy. The fact that is is inline allows you to do a non local return just like + * you would from a while loop. + * + * If [ignoreInnerCancellations] is `true`, [CancellationException]s thrown from the [block] will be + * caught and ignored. Next iteration will still check for cancellation, so it will exit safely by + * throwing it if the entire scope is cancelled. This gives a chance to recover from local + * cancellations in an iteration. + */ @ExperimentalSplittiesApi
--- a/modules/coroutines/src/commonMain/kotlin/splitties/coroutines/ScopeLoops.kt +++ b/modules/coroutines/src/commonMain/kotlin/splitties/coroutines/ScopeLoops.kt @@ ... @@ +/** + * As of Kotlin 1.3, `while (true)` evaluates to [Unit] instead of [Nothing] in lambdas, and using + * `coroutineContext.ensureActive()` would add another line of boilerplate, so this inline extension + * function can be handy. The fact that is is inline allows you to do a non local return just like + * you would from a while loop. + * + * If [ignoreInnerCancellations] is `true`, [CancellationException]s thrown from the [block] will be + * caught and ignored. Next iteration will still check for cancellation, so it will exit safely by + * throwing it if the entire scope is cancelled. This gives a chance to recover from local + * cancellations in an iteration. + */ @ExperimentalSplittiesApi
--- a/modules/coroutines/src/commonMain/kotlin/splitties/coroutines/ScopeLoops.kt +++ b/modules/coroutines/src/commonMain/kotlin/splitties/coroutines/ScopeLoops.kt @@ -24,2 +24,13 @@ CON ADD /** ADD * As of Kotlin 1.3, `while (true)` evaluates to [Unit] instead of [Nothing] in lambdas, and using ADD * `coroutineContext.ensureActive()` would add another line of boilerplate, so this inline extension ADD * function can be handy. The fact that is is inline allows you to do a non local return just like ADD * you would from a while loop. ADD * ADD * If [ignoreInnerCancellations] is `true`, [CancellationException]s thrown from the [block] will be ADD * caught and ignored. Next iteration will still check for cancellation, so it will exit safely by ADD * throwing it if the entire scope is cancelled. This gives a chance to recover from local ADD * cancellations in an iteration. ADD */ CON @ExperimentalSplittiesApi
<<<<<<< SEARCH } @ExperimentalSplittiesApi suspend inline fun repeatWhileActive( ======= } /** * As of Kotlin 1.3, `while (true)` evaluates to [Unit] instead of [Nothing] in lambdas, and using * `coroutineContext.ensureActive()` would add another line of boilerplate, so this inline extension * function can be handy. The fact that is is inline allows you to do a non local return just like * you would from a while loop. * * If [ignoreInnerCancellations] is `true`, [CancellationException]s thrown from the [block] will be * caught and ignored. Next iteration will still check for cancellation, so it will exit safely by * throwing it if the entire scope is cancelled. This gives a chance to recover from local * cancellations in an iteration. */ @ExperimentalSplittiesApi suspend inline fun repeatWhileActive( >>>>>>> REPLACE
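The KDoc added by this commit spells out that a CancellationException thrown inside one iteration is swallowed while cancellation of the enclosing scope still ends the loop. A small usage sketch of that behaviour, with pollOnce as an invented suspending call and assuming the Splitties coroutines artifact is on the classpath:

import splitties.coroutines.repeatWhileActive
import splitties.experimental.ExperimentalSplittiesApi

@ExperimentalSplittiesApi
suspend fun pollForever(pollOnce: suspend () -> Unit): Nothing =
    repeatWhileActive(ignoreInnerCancellations = true) {
        // A CancellationException thrown by pollOnce() (for example from an
        // internal timeout) is ignored here; cancellation of the surrounding
        // scope is still detected at the start of the next iteration.
        pollOnce()
    }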
lervag/vimtex
bd59db76bb81218d04224e44773eae9d3d9dfc21
rplugin/python3/denite/source/toc.py
python
mit
Fix Denite support for vim8.
# -*- coding: utf-8 -*- from .base import Base class Source(Base): def __init__(self, vim): super().__init__(vim) self.name = 'vimtex_toc' self.kind = 'file' @staticmethod def format_number(n): if not n or n['frontmatter'] or n['backmatter']: return '' num = [str(n[k]) for k in [ 'part', 'chapter', 'section', 'subsection', 'subsubsection', 'subsubsubsection'] if n[k] is not 0] if n['appendix']: num[0] = chr(int(num[0]) + 64) fnum = '.'.join(num) return fnum @staticmethod def create_candidate(e, depth): indent = (' ' * 2*(depth - e['level']) + e['title'])[:60] number = Source.format_number(e['number']) abbr = '{:65}{:10}'.format(indent, number) return {'word': e['title'], 'abbr': abbr, 'action__path': e['file'], 'action__line': e.get('line', 0)} def gather_candidates(self, context): entries = self.vim.eval('vimtex#toc#get_entries()') depth = max([e['level'] for e in entries]) return [Source.create_candidate(e, depth) for e in entries]
# -*- coding: utf-8 -*- from .base import Base class Source(Base): def __init__(self, vim): super().__init__(vim) self.name = 'vimtex_toc' self.kind = 'file' @staticmethod def format_number(n): if not n or not type(n) is dict or n['frontmatter'] or n['backmatter']: return '' num = [str(n[k]) for k in [ 'part', 'chapter', 'section', 'subsection', 'subsubsection', 'subsubsubsection'] if n[k] is not 0] if n['appendix']: num[0] = chr(int(num[0]) + 64) fnum = '.'.join(num) return fnum @staticmethod def create_candidate(e, depth): indent = (' ' * 2*(int(depth) - int(e['level'])) + e['title'])[:60] number = Source.format_number(e['number']) abbr = '{:65}{:10}'.format(indent, number) return {'word': e['title'], 'abbr': abbr, 'action__path': e['file'], 'action__line': e.get('line', 0)} def gather_candidates(self, context): entries = self.vim.eval('vimtex#toc#get_entries()') depth = max([int(e['level']) for e in entries]) return [Source.create_candidate(e, depth) for e in entries]
3
3
3
mixed
--- a/rplugin/python3/denite/source/toc.py +++ b/rplugin/python3/denite/source/toc.py @@ -15,3 +15,3 @@ def format_number(n): - if not n or n['frontmatter'] or n['backmatter']: + if not n or not type(n) is dict or n['frontmatter'] or n['backmatter']: return '' @@ -34,3 +34,3 @@ def create_candidate(e, depth): - indent = (' ' * 2*(depth - e['level']) + e['title'])[:60] + indent = (' ' * 2*(int(depth) - int(e['level'])) + e['title'])[:60] number = Source.format_number(e['number']) @@ -44,3 +44,3 @@ entries = self.vim.eval('vimtex#toc#get_entries()') - depth = max([e['level'] for e in entries]) + depth = max([int(e['level']) for e in entries]) return [Source.create_candidate(e, depth) for e in entries]
--- a/rplugin/python3/denite/source/toc.py +++ b/rplugin/python3/denite/source/toc.py @@ ... @@ def format_number(n): - if not n or n['frontmatter'] or n['backmatter']: + if not n or not type(n) is dict or n['frontmatter'] or n['backmatter']: return '' @@ ... @@ def create_candidate(e, depth): - indent = (' ' * 2*(depth - e['level']) + e['title'])[:60] + indent = (' ' * 2*(int(depth) - int(e['level'])) + e['title'])[:60] number = Source.format_number(e['number']) @@ ... @@ entries = self.vim.eval('vimtex#toc#get_entries()') - depth = max([e['level'] for e in entries]) + depth = max([int(e['level']) for e in entries]) return [Source.create_candidate(e, depth) for e in entries]
--- a/rplugin/python3/denite/source/toc.py +++ b/rplugin/python3/denite/source/toc.py @@ -15,3 +15,3 @@ CON def format_number(n): DEL if not n or n['frontmatter'] or n['backmatter']: ADD if not n or not type(n) is dict or n['frontmatter'] or n['backmatter']: CON return '' @@ -34,3 +34,3 @@ CON def create_candidate(e, depth): DEL indent = (' ' * 2*(depth - e['level']) + e['title'])[:60] ADD indent = (' ' * 2*(int(depth) - int(e['level'])) + e['title'])[:60] CON number = Source.format_number(e['number']) @@ -44,3 +44,3 @@ CON entries = self.vim.eval('vimtex#toc#get_entries()') DEL depth = max([e['level'] for e in entries]) ADD depth = max([int(e['level']) for e in entries]) CON return [Source.create_candidate(e, depth) for e in entries]
<<<<<<< SEARCH @staticmethod def format_number(n): if not n or n['frontmatter'] or n['backmatter']: return '' ======= @staticmethod def format_number(n): if not n or not type(n) is dict or n['frontmatter'] or n['backmatter']: return '' >>>>>>> REPLACE <<<<<<< SEARCH @staticmethod def create_candidate(e, depth): indent = (' ' * 2*(depth - e['level']) + e['title'])[:60] number = Source.format_number(e['number']) abbr = '{:65}{:10}'.format(indent, number) ======= @staticmethod def create_candidate(e, depth): indent = (' ' * 2*(int(depth) - int(e['level'])) + e['title'])[:60] number = Source.format_number(e['number']) abbr = '{:65}{:10}'.format(indent, number) >>>>>>> REPLACE <<<<<<< SEARCH def gather_candidates(self, context): entries = self.vim.eval('vimtex#toc#get_entries()') depth = max([e['level'] for e in entries]) return [Source.create_candidate(e, depth) for e in entries] ======= def gather_candidates(self, context): entries = self.vim.eval('vimtex#toc#get_entries()') depth = max([int(e['level']) for e in entries]) return [Source.create_candidate(e, depth) for e in entries] >>>>>>> REPLACE
argoroots/Entu
de9a6f647d0a6082e2a473895ec61ba23b41753e
controllers/oldauth.py
python
mit
Create users when they log in
import hashlib import base64 from datetime import date from bo import * from database.oldauth import * class Login(webapp.RequestHandler): def get(self): if self.request.get('site'): user = users.get_current_user() site = self.request.get('site') oa = db.Query(OldAuth).filter('site', site).get() if not oa: oa = OldAuth() oa.site = site oa.put() user_name = user.nickname() user_key = hashlib.md5(user.nickname() + date.today().strftime('%Y-%m-%d') + oa.salt).hexdigest() key = base64.b64encode(user_key + user_name) if oa.loginurl: self.redirect(oa.loginurl % key) class Logout(webapp.RequestHandler): def get(self): if self.request.get('site'): user = users.get_current_user() site = self.request.get('site') oa = db.Query(OldAuth).filter('site', site).get() if oa: self.redirect(users.create_logout_url(oa.logouturl)) def main(): Route([ ('/oldauth', Login), ('/oldauth_exit', Logout), ]) if __name__ == '__main__': main()
import hashlib import base64 from datetime import date from bo import * from database.oldauth import * class Login(webapp.RequestHandler): def get(self): if self.request.get('site'): u = User().current() user = users.get_current_user() site = self.request.get('site') oa = db.Query(OldAuth).filter('site', site).get() if not oa: oa = OldAuth() oa.site = site oa.put() user_name = user.nickname() user_key = hashlib.md5(user.nickname() + date.today().strftime('%Y-%m-%d') + oa.salt).hexdigest() key = base64.b64encode(user_key + user_name) if oa.loginurl: self.redirect(oa.loginurl % key) class Logout(webapp.RequestHandler): def get(self): if self.request.get('site'): user = users.get_current_user() site = self.request.get('site') oa = db.Query(OldAuth).filter('site', site).get() if oa: self.redirect(users.create_logout_url(oa.logouturl)) def main(): Route([ ('/oldauth', Login), ('/oldauth_exit', Logout), ]) if __name__ == '__main__': main()
3
0
1
add_only
--- a/controllers/oldauth.py +++ b/controllers/oldauth.py @@ -11,2 +11,5 @@ if self.request.get('site'): + + u = User().current() + user = users.get_current_user()
--- a/controllers/oldauth.py +++ b/controllers/oldauth.py @@ ... @@ if self.request.get('site'): + + u = User().current() + user = users.get_current_user()
--- a/controllers/oldauth.py +++ b/controllers/oldauth.py @@ -11,2 +11,5 @@ CON if self.request.get('site'): ADD ADD u = User().current() ADD CON user = users.get_current_user()
<<<<<<< SEARCH def get(self): if self.request.get('site'): user = users.get_current_user() site = self.request.get('site') ======= def get(self): if self.request.get('site'): u = User().current() user = users.get_current_user() site = self.request.get('site') >>>>>>> REPLACE
rowedonalde/zip-info
04d1dd82ca861f4ab83b9c9439ea972c433cf0fe
src/json_writer.rs
rust
mit
Add test for serializing ZipObjectJsonWriter
use ::serde_json; use ::zip; use std::collections::HashMap; #[derive(Serialize, Debug)] struct MultiArchiveJsonWriter<'a> { archives: HashMap<&'a str, &'a ZipArchiveJsonWriter<'a>>, } #[derive(Serialize, Debug)] struct ZipArchiveJsonWriter<'a> { objects: HashMap<&'a str, &'a ZipObjectJsonWriter>, } #[derive(Serialize, Debug)] struct ZipObjectJsonWriter { compression_type: String, original_size: u64, compressed_size: u64, compression_rate: String, } #[cfg(test)] mod tests { use super::*; #[test] fn test_serialize_json_writer() { let zip_object = ZipObjectJsonWriter { compression_type: format!("{}", zip::CompressionMethod::Deflated), original_size: 100, compressed_size: 50, compression_rate: String::from("50%"), }; let zip_object_serialized = serde_json::to_string(&zip_object) .unwrap(); println!("zip_object_serialized: {}", zip_object_serialized); } }
use ::serde_json; use ::zip; use std::collections::HashMap; #[derive(Serialize, Debug)] struct MultiArchiveJsonWriter<'a> { archives: HashMap<&'a str, &'a ZipArchiveJsonWriter<'a>>, } #[derive(Serialize, Debug)] struct ZipArchiveJsonWriter<'a> { objects: HashMap<&'a str, &'a ZipObjectJsonWriter>, } #[derive(Serialize, Deserialize, Debug, PartialEq)] struct ZipObjectJsonWriter { compression_type: String, original_size: u64, compressed_size: u64, compression_rate: String, } #[cfg(test)] mod tests { use super::*; #[test] fn test_serialize_json_writer() { let zip_object = ZipObjectJsonWriter { compression_type: format!("{}", zip::CompressionMethod::Deflated), original_size: 100, compressed_size: 50, compression_rate: String::from("50%"), }; let zip_object_serialized = serde_json::to_string(&zip_object) .unwrap(); let zip_object_pretty = serde_json:: to_string_pretty(&zip_object).unwrap(); let zip_object_deserialized: ZipObjectJsonWriter = serde_json::from_str(zip_object_serialized.as_str()).unwrap(); let zip_object_depretty: ZipObjectJsonWriter = serde_json::from_str(zip_object_pretty.as_str()).unwrap(); assert_eq!(zip_object, zip_object_deserialized); assert_eq!(zip_object, zip_object_depretty); } }
12
2
2
mixed
--- a/src/json_writer.rs +++ b/src/json_writer.rs @@ -14,3 +14,3 @@ -#[derive(Serialize, Debug)] +#[derive(Serialize, Deserialize, Debug, PartialEq)] struct ZipObjectJsonWriter { @@ -38,3 +38,13 @@ - println!("zip_object_serialized: {}", zip_object_serialized); + let zip_object_pretty = serde_json:: + to_string_pretty(&zip_object).unwrap(); + + let zip_object_deserialized: ZipObjectJsonWriter = + serde_json::from_str(zip_object_serialized.as_str()).unwrap(); + + let zip_object_depretty: ZipObjectJsonWriter = + serde_json::from_str(zip_object_pretty.as_str()).unwrap(); + + assert_eq!(zip_object, zip_object_deserialized); + assert_eq!(zip_object, zip_object_depretty); }
--- a/src/json_writer.rs +++ b/src/json_writer.rs @@ ... @@ -#[derive(Serialize, Debug)] +#[derive(Serialize, Deserialize, Debug, PartialEq)] struct ZipObjectJsonWriter { @@ ... @@ - println!("zip_object_serialized: {}", zip_object_serialized); + let zip_object_pretty = serde_json:: + to_string_pretty(&zip_object).unwrap(); + + let zip_object_deserialized: ZipObjectJsonWriter = + serde_json::from_str(zip_object_serialized.as_str()).unwrap(); + + let zip_object_depretty: ZipObjectJsonWriter = + serde_json::from_str(zip_object_pretty.as_str()).unwrap(); + + assert_eq!(zip_object, zip_object_deserialized); + assert_eq!(zip_object, zip_object_depretty); }
--- a/src/json_writer.rs +++ b/src/json_writer.rs @@ -14,3 +14,3 @@ CON DEL #[derive(Serialize, Debug)] ADD #[derive(Serialize, Deserialize, Debug, PartialEq)] CON struct ZipObjectJsonWriter { @@ -38,3 +38,13 @@ CON DEL println!("zip_object_serialized: {}", zip_object_serialized); ADD let zip_object_pretty = serde_json:: ADD to_string_pretty(&zip_object).unwrap(); ADD ADD let zip_object_deserialized: ZipObjectJsonWriter = ADD serde_json::from_str(zip_object_serialized.as_str()).unwrap(); ADD ADD let zip_object_depretty: ZipObjectJsonWriter = ADD serde_json::from_str(zip_object_pretty.as_str()).unwrap(); ADD ADD assert_eq!(zip_object, zip_object_deserialized); ADD assert_eq!(zip_object, zip_object_depretty); CON }
<<<<<<< SEARCH } #[derive(Serialize, Debug)] struct ZipObjectJsonWriter { compression_type: String, ======= } #[derive(Serialize, Deserialize, Debug, PartialEq)] struct ZipObjectJsonWriter { compression_type: String, >>>>>>> REPLACE <<<<<<< SEARCH .unwrap(); println!("zip_object_serialized: {}", zip_object_serialized); } } ======= .unwrap(); let zip_object_pretty = serde_json:: to_string_pretty(&zip_object).unwrap(); let zip_object_deserialized: ZipObjectJsonWriter = serde_json::from_str(zip_object_serialized.as_str()).unwrap(); let zip_object_depretty: ZipObjectJsonWriter = serde_json::from_str(zip_object_pretty.as_str()).unwrap(); assert_eq!(zip_object, zip_object_deserialized); assert_eq!(zip_object, zip_object_depretty); } } >>>>>>> REPLACE
centic9/jgit-cookbook
ed8eff82faf70c36c6129d1f6951f5cec6609012
src/main/java/org/dstadler/jgit/CreateNewRepository.java
java
apache-2.0
Rewrite example to use preferred way of creating a new repository
package org.dstadler.jgit; /* Copyright 2013, 2014 Dominik Stadler Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.storage.file.FileRepositoryBuilder; /** * Simple snippet which shows how to create a new repository * * @author dominik.stadler at gmx.at */ public class CreateNewRepository { public static void main(String[] args) throws IOException { // prepare a new folder File localPath = File.createTempFile("TestGitRepository", ""); localPath.delete(); // create the directory Repository repository = FileRepositoryBuilder.create(new File(localPath, ".git")); repository.create(); System.out.println("Having repository: " + repository.getDirectory()); repository.close(); FileUtils.deleteDirectory(localPath); } }
package org.dstadler.jgit; /* Copyright 2013, 2014 Dominik Stadler Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.errors.GitAPIException; /** * Simple snippet which shows how to create a new repository * * @author dominik.stadler at gmx.at */ public class CreateNewRepository { public static void main(String[] args) throws IOException, IllegalStateException, GitAPIException { // prepare a new folder File localPath = File.createTempFile("TestGitRepository", ""); localPath.delete(); // create the directory Git git = Git.init().setDirectory(localPath).call(); System.out.println("Having repository: " + git.getRepository().getDirectory()); git.close(); FileUtils.deleteDirectory(localPath); } }
7
8
4
mixed
--- a/src/main/java/org/dstadler/jgit/CreateNewRepository.java +++ b/src/main/java/org/dstadler/jgit/CreateNewRepository.java @@ -22,4 +22,4 @@ import org.apache.commons.io.FileUtils; -import org.eclipse.jgit.lib.Repository; -import org.eclipse.jgit.storage.file.FileRepositoryBuilder; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.errors.GitAPIException; @@ -27,3 +27,3 @@ * Simple snippet which shows how to create a new repository - * + * * @author dominik.stadler at gmx.at @@ -32,3 +32,3 @@ - public static void main(String[] args) throws IOException { + public static void main(String[] args) throws IOException, IllegalStateException, GitAPIException { // prepare a new folder @@ -38,8 +38,7 @@ // create the directory - Repository repository = FileRepositoryBuilder.create(new File(localPath, ".git")); - repository.create(); + Git git = Git.init().setDirectory(localPath).call(); - System.out.println("Having repository: " + repository.getDirectory()); + System.out.println("Having repository: " + git.getRepository().getDirectory()); - repository.close(); + git.close();
--- a/src/main/java/org/dstadler/jgit/CreateNewRepository.java +++ b/src/main/java/org/dstadler/jgit/CreateNewRepository.java @@ ... @@ import org.apache.commons.io.FileUtils; -import org.eclipse.jgit.lib.Repository; -import org.eclipse.jgit.storage.file.FileRepositoryBuilder; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.errors.GitAPIException; @@ ... @@ * Simple snippet which shows how to create a new repository - * + * * @author dominik.stadler at gmx.at @@ ... @@ - public static void main(String[] args) throws IOException { + public static void main(String[] args) throws IOException, IllegalStateException, GitAPIException { // prepare a new folder @@ ... @@ // create the directory - Repository repository = FileRepositoryBuilder.create(new File(localPath, ".git")); - repository.create(); + Git git = Git.init().setDirectory(localPath).call(); - System.out.println("Having repository: " + repository.getDirectory()); + System.out.println("Having repository: " + git.getRepository().getDirectory()); - repository.close(); + git.close();
--- a/src/main/java/org/dstadler/jgit/CreateNewRepository.java +++ b/src/main/java/org/dstadler/jgit/CreateNewRepository.java @@ -22,4 +22,4 @@ CON import org.apache.commons.io.FileUtils; DEL import org.eclipse.jgit.lib.Repository; DEL import org.eclipse.jgit.storage.file.FileRepositoryBuilder; ADD import org.eclipse.jgit.api.Git; ADD import org.eclipse.jgit.api.errors.GitAPIException; CON @@ -27,3 +27,3 @@ CON * Simple snippet which shows how to create a new repository DEL * ADD * CON * @author dominik.stadler at gmx.at @@ -32,3 +32,3 @@ CON DEL public static void main(String[] args) throws IOException { ADD public static void main(String[] args) throws IOException, IllegalStateException, GitAPIException { CON // prepare a new folder @@ -38,8 +38,7 @@ CON // create the directory DEL Repository repository = FileRepositoryBuilder.create(new File(localPath, ".git")); DEL repository.create(); ADD Git git = Git.init().setDirectory(localPath).call(); CON DEL System.out.println("Having repository: " + repository.getDirectory()); ADD System.out.println("Having repository: " + git.getRepository().getDirectory()); CON DEL repository.close(); ADD git.close(); CON
<<<<<<< SEARCH import org.apache.commons.io.FileUtils; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.storage.file.FileRepositoryBuilder; /** * Simple snippet which shows how to create a new repository * * @author dominik.stadler at gmx.at */ public class CreateNewRepository { public static void main(String[] args) throws IOException { // prepare a new folder File localPath = File.createTempFile("TestGitRepository", ""); localPath.delete(); // create the directory Repository repository = FileRepositoryBuilder.create(new File(localPath, ".git")); repository.create(); System.out.println("Having repository: " + repository.getDirectory()); repository.close(); FileUtils.deleteDirectory(localPath); ======= import org.apache.commons.io.FileUtils; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.errors.GitAPIException; /** * Simple snippet which shows how to create a new repository * * @author dominik.stadler at gmx.at */ public class CreateNewRepository { public static void main(String[] args) throws IOException, IllegalStateException, GitAPIException { // prepare a new folder File localPath = File.createTempFile("TestGitRepository", ""); localPath.delete(); // create the directory Git git = Git.init().setDirectory(localPath).call(); System.out.println("Having repository: " + git.getRepository().getDirectory()); git.close(); FileUtils.deleteDirectory(localPath); >>>>>>> REPLACE
smalldatalab/omh-dsu
842f50880b7ef8612cd90bdde9dfc16b874ca5fc
src/main/java/org/openmhealth/dsu/repository/UserRepository.java
java
apache-2.0
Update user repository to match removal of user approval logic.
/* * Copyright 2014 Open mHealth * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openmhealth.dsu.repository; import org.openmhealth.dsu.domain.User; import org.springframework.data.repository.Repository; import java.util.Optional; /** * A repository of user accounts. * * @author Emerson Farrugia */ public interface UserRepository extends Repository<User, String> { /** * @see org.springframework.data.repository.CrudRepository#findOne(java.io.Serializable) */ Optional<User> findOne(String username); Optional<User> findByRegistrationKey(String registrationKey); /** * @see org.springframework.data.repository.CrudRepository#save(Object) */ void save(User user); }
/* * Copyright 2014 Open mHealth * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openmhealth.dsu.repository; import org.openmhealth.dsu.domain.User; import org.springframework.data.repository.Repository; import java.util.Optional; /** * A repository of user accounts. * * @author Emerson Farrugia */ public interface UserRepository extends Repository<User, String> { /** * @see org.springframework.data.repository.CrudRepository#findOne(java.io.Serializable) */ Optional<User> findOne(String username); /** * @see org.springframework.data.repository.CrudRepository#save(Object) */ User save(User user); }
1
3
2
mixed
--- a/src/main/java/org/openmhealth/dsu/repository/UserRepository.java +++ b/src/main/java/org/openmhealth/dsu/repository/UserRepository.java @@ -36,4 +36,2 @@ - Optional<User> findByRegistrationKey(String registrationKey); - /** @@ -41,3 +39,3 @@ */ - void save(User user); + User save(User user); }
--- a/src/main/java/org/openmhealth/dsu/repository/UserRepository.java +++ b/src/main/java/org/openmhealth/dsu/repository/UserRepository.java @@ ... @@ - Optional<User> findByRegistrationKey(String registrationKey); - /** @@ ... @@ */ - void save(User user); + User save(User user); }
--- a/src/main/java/org/openmhealth/dsu/repository/UserRepository.java +++ b/src/main/java/org/openmhealth/dsu/repository/UserRepository.java @@ -36,4 +36,2 @@ CON DEL Optional<User> findByRegistrationKey(String registrationKey); DEL CON /** @@ -41,3 +39,3 @@ CON */ DEL void save(User user); ADD User save(User user); CON }
<<<<<<< SEARCH Optional<User> findOne(String username); Optional<User> findByRegistrationKey(String registrationKey); /** * @see org.springframework.data.repository.CrudRepository#save(Object) */ void save(User user); } ======= Optional<User> findOne(String username); /** * @see org.springframework.data.repository.CrudRepository#save(Object) */ User save(User user); } >>>>>>> REPLACE
mcdimus/mate-wp
3f296b1f7b3a7aad3643a7d925676909658cabe4
src/main/kotlin/ee/mcdimus/matewp/service/FileSystemService.kt
kotlin
mit
Use system property 'user.home' instead of environment variable HOME, which is available only on Linux.
package ee.mcdimus.matewp.service import java.nio.file.Files import java.nio.file.Path import java.nio.file.Paths import java.util.* /** * @author Dmitri Maksimov */ class FileSystemService { companion object { private const val HOME = "HOME" } fun getHomeDirectory(): Path { val homeDirectoryPath = System.getenv() .getOrElse(HOME, { throw IllegalStateException("environment variable $HOME is no defined") }) return Paths.get(homeDirectoryPath) } fun getImagesDirectory(): Path { val imagesDirectory = getHomeDirectory().resolve("Pictures/mate-wp") if (Files.notExists(imagesDirectory)) { return Files.createDirectories(imagesDirectory) } return imagesDirectory } fun getConfigsDirectory(): Path { val configsDirectory = getImagesDirectory().resolve("configs") if (Files.notExists(configsDirectory)) { return Files.createDirectories(configsDirectory) } return configsDirectory } fun saveProperties(propertiesPath: Path, propertyMap: Map<String, String>): Path { val properties = Properties() for ((key, value) in propertyMap) { properties.setProperty(key, value) } Files.newOutputStream(propertiesPath).use { properties.store(it, null) } return propertiesPath } fun loadProperties(propertiesPath: Path): Properties { val properties = Properties() Files.newInputStream(propertiesPath).use { properties.load(it) } return properties } }
package ee.mcdimus.matewp.service import java.nio.file.Files import java.nio.file.Path import java.nio.file.Paths import java.util.* /** * @author Dmitri Maksimov */ class FileSystemService { companion object { private const val USER_HOME = "user.home" } fun getHomeDirectory(): Path { val homeDirectoryPath = System.getProperty(USER_HOME) ?: throw IllegalStateException("system property $USER_HOME is not defined") return Paths.get(homeDirectoryPath) } fun getImagesDirectory(): Path { val imagesDirectory = getHomeDirectory().resolve("Pictures/mate-wp") if (Files.notExists(imagesDirectory)) { return Files.createDirectories(imagesDirectory) } return imagesDirectory } fun getConfigsDirectory(): Path { val configsDirectory = getImagesDirectory().resolve("configs") if (Files.notExists(configsDirectory)) { return Files.createDirectories(configsDirectory) } return configsDirectory } fun saveProperties(propertiesPath: Path, propertyMap: Map<String, String>): Path { val properties = Properties() for ((key, value) in propertyMap) { properties.setProperty(key, value) } Files.newOutputStream(propertiesPath).use { properties.store(it, null) } return propertiesPath } fun loadProperties(propertiesPath: Path): Properties { val properties = Properties() Files.newInputStream(propertiesPath).use { properties.load(it) } return properties } }
4
3
2
mixed
--- a/src/main/kotlin/ee/mcdimus/matewp/service/FileSystemService.kt +++ b/src/main/kotlin/ee/mcdimus/matewp/service/FileSystemService.kt @@ -13,3 +13,3 @@ companion object { - private const val HOME = "HOME" + private const val USER_HOME = "user.home" } @@ -17,4 +17,5 @@ fun getHomeDirectory(): Path { - val homeDirectoryPath = System.getenv() - .getOrElse(HOME, { throw IllegalStateException("environment variable $HOME is no defined") }) + val homeDirectoryPath = System.getProperty(USER_HOME) + ?: throw IllegalStateException("system property $USER_HOME is not defined") + return Paths.get(homeDirectoryPath)
--- a/src/main/kotlin/ee/mcdimus/matewp/service/FileSystemService.kt +++ b/src/main/kotlin/ee/mcdimus/matewp/service/FileSystemService.kt @@ ... @@ companion object { - private const val HOME = "HOME" + private const val USER_HOME = "user.home" } @@ ... @@ fun getHomeDirectory(): Path { - val homeDirectoryPath = System.getenv() - .getOrElse(HOME, { throw IllegalStateException("environment variable $HOME is no defined") }) + val homeDirectoryPath = System.getProperty(USER_HOME) + ?: throw IllegalStateException("system property $USER_HOME is not defined") + return Paths.get(homeDirectoryPath)
--- a/src/main/kotlin/ee/mcdimus/matewp/service/FileSystemService.kt +++ b/src/main/kotlin/ee/mcdimus/matewp/service/FileSystemService.kt @@ -13,3 +13,3 @@ CON companion object { DEL private const val HOME = "HOME" ADD private const val USER_HOME = "user.home" CON } @@ -17,4 +17,5 @@ CON fun getHomeDirectory(): Path { DEL val homeDirectoryPath = System.getenv() DEL .getOrElse(HOME, { throw IllegalStateException("environment variable $HOME is no defined") }) ADD val homeDirectoryPath = System.getProperty(USER_HOME) ADD ?: throw IllegalStateException("system property $USER_HOME is not defined") ADD CON return Paths.get(homeDirectoryPath)
<<<<<<< SEARCH companion object { private const val HOME = "HOME" } fun getHomeDirectory(): Path { val homeDirectoryPath = System.getenv() .getOrElse(HOME, { throw IllegalStateException("environment variable $HOME is no defined") }) return Paths.get(homeDirectoryPath) } ======= companion object { private const val USER_HOME = "user.home" } fun getHomeDirectory(): Path { val homeDirectoryPath = System.getProperty(USER_HOME) ?: throw IllegalStateException("system property $USER_HOME is not defined") return Paths.get(homeDirectoryPath) } >>>>>>> REPLACE
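The portability point in this commit message, that the JVM always defines the user.home system property while the HOME environment variable is normally only set on Unix-like systems, can be seen with a few lines of standalone Kotlin (illustrative only, not part of the project):

fun main() {
    // Defined by the JVM on every supported platform.
    val fromProperty: String? = System.getProperty("user.home")
    // Usually set on Linux and macOS, usually absent on Windows.
    val fromEnvironment: String? = System.getenv("HOME")
    println("user.home = $fromProperty")
    println("HOME      = $fromEnvironment")
}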
pemami4911/Sub8
3875b1ec7d056d337cc1c02d9567cd7ff1ae9748
utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
python
mit
UTILS: Add init-helper 'wait for subscriber'. For integration-testing purposes it is often useful to wait until a particular node subscribes to you.
import rospy from time import time def wait_for_param(param_name, timeout=None, poll_rate=0.1): '''Blocking wait for a parameter named $parameter_name to exist Poll at frequency $poll_rate Once the parameter exists, return get and return it. This function intentionally leaves failure logging duties to the developer ''' start_time = time() rate = rospy.Rate(poll_rate) while not rospy.is_shutdown(): # Check if the parameter now exists if rospy.has_param(param_name): return rospy.get_param(param_name) # If we exceed a defined timeout, return None if timeout is not None: if time() - start_time > timeout: return None # Continue to poll at poll_rate rate.sleep()
import rospy import rostest import time def wait_for_param(param_name, timeout=None, poll_rate=0.1): '''Blocking wait for a parameter named $parameter_name to exist Poll at frequency $poll_rate Once the parameter exists, return get and return it. This function intentionally leaves failure logging duties to the developer ''' start_time = time.time() rate = rospy.Rate(poll_rate) while not rospy.is_shutdown(): # Check if the parameter now exists if rospy.has_param(param_name): return rospy.get_param(param_name) # If we exceed a defined timeout, return None if timeout is not None: if time.time() - start_time > timeout: return None # Continue to poll at poll_rate rate.sleep() def wait_for_subscriber(node_name, topic, timeout=5.0): '''Blocks until $node_name subscribes to $topic Useful mostly in integration tests -- I would counsel against use elsewhere ''' end_time = time.time() + timeout resolved_topic = rospy.resolve_name(topic) resolved_node = rospy.resolve_name(node_name) # Wait for time-out or ros-shutdown while (time.time() < end_time) and (not rospy.is_shutdown()): subscribed = rostest.is_subscriber( rospy.resolve_name(topic), rospy.resolve_name(node_name) ) # Success scenario: node subscribes if subscribed: break time.sleep(0.1) # Could do this with a while/else # But chose to explicitly check success = rostest.is_subscriber( rospy.resolve_name(topic), rospy.resolve_name(node_name) ) return success
34
3
4
mixed
--- a/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py @@ -1,3 +1,4 @@ import rospy -from time import time +import rostest +import time @@ -11,3 +12,3 @@ ''' - start_time = time() + start_time = time.time() rate = rospy.Rate(poll_rate) @@ -21,3 +22,3 @@ if timeout is not None: - if time() - start_time > timeout: + if time.time() - start_time > timeout: return None @@ -26 +27,31 @@ rate.sleep() + + +def wait_for_subscriber(node_name, topic, timeout=5.0): + '''Blocks until $node_name subscribes to $topic + Useful mostly in integration tests -- + I would counsel against use elsewhere + ''' + end_time = time.time() + timeout + + resolved_topic = rospy.resolve_name(topic) + resolved_node = rospy.resolve_name(node_name) + + # Wait for time-out or ros-shutdown + while (time.time() < end_time) and (not rospy.is_shutdown()): + subscribed = rostest.is_subscriber( + rospy.resolve_name(topic), + rospy.resolve_name(node_name) + ) + # Success scenario: node subscribes + if subscribed: + break + time.sleep(0.1) + + # Could do this with a while/else + # But chose to explicitly check + success = rostest.is_subscriber( + rospy.resolve_name(topic), + rospy.resolve_name(node_name) + ) + return success
--- a/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py @@ ... @@ import rospy -from time import time +import rostest +import time @@ ... @@ ''' - start_time = time() + start_time = time.time() rate = rospy.Rate(poll_rate) @@ ... @@ if timeout is not None: - if time() - start_time > timeout: + if time.time() - start_time > timeout: return None @@ ... @@ rate.sleep() + + +def wait_for_subscriber(node_name, topic, timeout=5.0): + '''Blocks until $node_name subscribes to $topic + Useful mostly in integration tests -- + I would counsel against use elsewhere + ''' + end_time = time.time() + timeout + + resolved_topic = rospy.resolve_name(topic) + resolved_node = rospy.resolve_name(node_name) + + # Wait for time-out or ros-shutdown + while (time.time() < end_time) and (not rospy.is_shutdown()): + subscribed = rostest.is_subscriber( + rospy.resolve_name(topic), + rospy.resolve_name(node_name) + ) + # Success scenario: node subscribes + if subscribed: + break + time.sleep(0.1) + + # Could do this with a while/else + # But chose to explicitly check + success = rostest.is_subscriber( + rospy.resolve_name(topic), + rospy.resolve_name(node_name) + ) + return success
--- a/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py @@ -1,3 +1,4 @@ CON import rospy DEL from time import time ADD import rostest ADD import time CON @@ -11,3 +12,3 @@ CON ''' DEL start_time = time() ADD start_time = time.time() CON rate = rospy.Rate(poll_rate) @@ -21,3 +22,3 @@ CON if timeout is not None: DEL if time() - start_time > timeout: ADD if time.time() - start_time > timeout: CON return None @@ -26 +27,31 @@ CON rate.sleep() ADD ADD ADD def wait_for_subscriber(node_name, topic, timeout=5.0): ADD '''Blocks until $node_name subscribes to $topic ADD Useful mostly in integration tests -- ADD I would counsel against use elsewhere ADD ''' ADD end_time = time.time() + timeout ADD ADD resolved_topic = rospy.resolve_name(topic) ADD resolved_node = rospy.resolve_name(node_name) ADD ADD # Wait for time-out or ros-shutdown ADD while (time.time() < end_time) and (not rospy.is_shutdown()): ADD subscribed = rostest.is_subscriber( ADD rospy.resolve_name(topic), ADD rospy.resolve_name(node_name) ADD ) ADD # Success scenario: node subscribes ADD if subscribed: ADD break ADD time.sleep(0.1) ADD ADD # Could do this with a while/else ADD # But chose to explicitly check ADD success = rostest.is_subscriber( ADD rospy.resolve_name(topic), ADD rospy.resolve_name(node_name) ADD ) ADD return success
<<<<<<< SEARCH import rospy from time import time ======= import rospy import rostest import time >>>>>>> REPLACE <<<<<<< SEARCH This function intentionally leaves failure logging duties to the developer ''' start_time = time() rate = rospy.Rate(poll_rate) while not rospy.is_shutdown(): ======= This function intentionally leaves failure logging duties to the developer ''' start_time = time.time() rate = rospy.Rate(poll_rate) while not rospy.is_shutdown(): >>>>>>> REPLACE <<<<<<< SEARCH # If we exceed a defined timeout, return None if timeout is not None: if time() - start_time > timeout: return None # Continue to poll at poll_rate rate.sleep() ======= # If we exceed a defined timeout, return None if timeout is not None: if time.time() - start_time > timeout: return None # Continue to poll at poll_rate rate.sleep() def wait_for_subscriber(node_name, topic, timeout=5.0): '''Blocks until $node_name subscribes to $topic Useful mostly in integration tests -- I would counsel against use elsewhere ''' end_time = time.time() + timeout resolved_topic = rospy.resolve_name(topic) resolved_node = rospy.resolve_name(node_name) # Wait for time-out or ros-shutdown while (time.time() < end_time) and (not rospy.is_shutdown()): subscribed = rostest.is_subscriber( rospy.resolve_name(topic), rospy.resolve_name(node_name) ) # Success scenario: node subscribes if subscribed: break time.sleep(0.1) # Could do this with a while/else # But chose to explicitly check success = rostest.is_subscriber( rospy.resolve_name(topic), rospy.resolve_name(node_name) ) return success >>>>>>> REPLACE
rhdunn/xquery-intellij-plugin
f80545a487275ab6a6eecb47dda8b8c06823a0e6
src/main/java/uk/co/reecedunn/intellij/plugin/xdm/model/Items.kt
kotlin
apache-2.0
Make XdmFunction, XdmNode, and derived types concrete classes.
/* * Copyright (C) 2017 Reece H. Dunn * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * XPath and XQuery Type System Part 1: Items * * Reference: https://www.w3.org/TR/xpath-datamodel-31 * Reference: https://www.w3.org/TR/2012/REC-xmlschema11-1-20120405 */ package uk.co.reecedunn.intellij.plugin.xdm.model interface XdmItem: XdmSequenceType interface XdmFunction: XdmItem interface XdmMap: XdmFunction interface XdmArray: XdmFunction interface XdmNode: XdmItem interface XdmAttribute: XdmNode interface XdmComment: XdmNode interface XdmDocument: XdmNode interface XdmElement: XdmNode interface XdmNamespace: XdmNode interface XdmProcessingInstruction: XdmNode interface XdmText: XdmNode
/* * Copyright (C) 2017 Reece H. Dunn * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * XPath and XQuery Type System Part 1: Items * * Reference: https://www.w3.org/TR/xpath-datamodel-31 * Reference: https://www.w3.org/TR/2012/REC-xmlschema11-1-20120405 */ package uk.co.reecedunn.intellij.plugin.xdm.model interface XdmItem: XdmSequenceType open class XdmFunction: XdmItem open class XdmMap: XdmFunction() open class XdmArray: XdmFunction() open class XdmNode: XdmItem open class XdmAttribute: XdmNode() open class XdmComment: XdmNode() open class XdmDocument: XdmNode() open class XdmElement: XdmNode() open class XdmNamespace: XdmNode() open class XdmProcessingInstruction: XdmNode() open class XdmText: XdmNode()
11
11
1
mixed
--- a/src/main/java/uk/co/reecedunn/intellij/plugin/xdm/model/Items.kt +++ b/src/main/java/uk/co/reecedunn/intellij/plugin/xdm/model/Items.kt @@ -25,22 +25,22 @@ -interface XdmFunction: XdmItem +open class XdmFunction: XdmItem -interface XdmMap: XdmFunction +open class XdmMap: XdmFunction() -interface XdmArray: XdmFunction +open class XdmArray: XdmFunction() -interface XdmNode: XdmItem +open class XdmNode: XdmItem -interface XdmAttribute: XdmNode +open class XdmAttribute: XdmNode() -interface XdmComment: XdmNode +open class XdmComment: XdmNode() -interface XdmDocument: XdmNode +open class XdmDocument: XdmNode() -interface XdmElement: XdmNode +open class XdmElement: XdmNode() -interface XdmNamespace: XdmNode +open class XdmNamespace: XdmNode() -interface XdmProcessingInstruction: XdmNode +open class XdmProcessingInstruction: XdmNode() -interface XdmText: XdmNode +open class XdmText: XdmNode()
--- a/src/main/java/uk/co/reecedunn/intellij/plugin/xdm/model/Items.kt +++ b/src/main/java/uk/co/reecedunn/intellij/plugin/xdm/model/Items.kt @@ ... @@ -interface XdmFunction: XdmItem +open class XdmFunction: XdmItem -interface XdmMap: XdmFunction +open class XdmMap: XdmFunction() -interface XdmArray: XdmFunction +open class XdmArray: XdmFunction() -interface XdmNode: XdmItem +open class XdmNode: XdmItem -interface XdmAttribute: XdmNode +open class XdmAttribute: XdmNode() -interface XdmComment: XdmNode +open class XdmComment: XdmNode() -interface XdmDocument: XdmNode +open class XdmDocument: XdmNode() -interface XdmElement: XdmNode +open class XdmElement: XdmNode() -interface XdmNamespace: XdmNode +open class XdmNamespace: XdmNode() -interface XdmProcessingInstruction: XdmNode +open class XdmProcessingInstruction: XdmNode() -interface XdmText: XdmNode +open class XdmText: XdmNode()
--- a/src/main/java/uk/co/reecedunn/intellij/plugin/xdm/model/Items.kt +++ b/src/main/java/uk/co/reecedunn/intellij/plugin/xdm/model/Items.kt @@ -25,22 +25,22 @@ CON DEL interface XdmFunction: XdmItem ADD open class XdmFunction: XdmItem CON DEL interface XdmMap: XdmFunction ADD open class XdmMap: XdmFunction() CON DEL interface XdmArray: XdmFunction ADD open class XdmArray: XdmFunction() CON DEL interface XdmNode: XdmItem ADD open class XdmNode: XdmItem CON DEL interface XdmAttribute: XdmNode ADD open class XdmAttribute: XdmNode() CON DEL interface XdmComment: XdmNode ADD open class XdmComment: XdmNode() CON DEL interface XdmDocument: XdmNode ADD open class XdmDocument: XdmNode() CON DEL interface XdmElement: XdmNode ADD open class XdmElement: XdmNode() CON DEL interface XdmNamespace: XdmNode ADD open class XdmNamespace: XdmNode() CON DEL interface XdmProcessingInstruction: XdmNode ADD open class XdmProcessingInstruction: XdmNode() CON DEL interface XdmText: XdmNode ADD open class XdmText: XdmNode()
<<<<<<< SEARCH interface XdmItem: XdmSequenceType interface XdmFunction: XdmItem interface XdmMap: XdmFunction interface XdmArray: XdmFunction interface XdmNode: XdmItem interface XdmAttribute: XdmNode interface XdmComment: XdmNode interface XdmDocument: XdmNode interface XdmElement: XdmNode interface XdmNamespace: XdmNode interface XdmProcessingInstruction: XdmNode interface XdmText: XdmNode ======= interface XdmItem: XdmSequenceType open class XdmFunction: XdmItem open class XdmMap: XdmFunction() open class XdmArray: XdmFunction() open class XdmNode: XdmItem open class XdmAttribute: XdmNode() open class XdmComment: XdmNode() open class XdmDocument: XdmNode() open class XdmElement: XdmNode() open class XdmNamespace: XdmNode() open class XdmProcessingInstruction: XdmNode() open class XdmText: XdmNode() >>>>>>> REPLACE
CO600GOL/Game_of_life
4359a9947c1d86d9e4003c1e8fc358e9a66c6b1d
DisplayAdapter/display_adapter/scripts/init_db.py
python
mit
Create internal db initialisation script Paired by Michael and Richard
__author__ = 'richard'
""" Script that is run from the command line in order to """ import sys import sqlite3 from display_adapter import db_name help_message = """ This initialises an sqlite3 db for the purposes of the DisplayAdapter programs. Arguments: init_db.py database_name """ runs_table = """ CREATE TABLE runs ( id INTEGER NOT NULL, input_pattern VARCHAR, time_slot DATETIME, user_name VARCHAR(50), PRIMARY KEY (id) ) """ screensavers_table = """ CREATE TABLE screensavers ( pattern VARCHAR ) """ def init_db(db_name=db_name): """ This function takes a database name and creates the database required for the DisplayAdapter programs """ con = sqlite3.connect(db_name) cur = con.cursor() cur.execute(runs_table) cur.execute(screensavers_table) con.commit() con.close() if __name__ == "__main__": if len(sys.argv) < 2: if sys.argv[1].lower() == "help": print(help_message) else: init_db(sys.argv[1]) else: init_db()
53
1
1
mixed
--- a/DisplayAdapter/display_adapter/scripts/init_db.py +++ b/DisplayAdapter/display_adapter/scripts/init_db.py @@ -1 +1,53 @@ -__author__ = 'richard' +""" +Script that is run from the command line in order to +""" +import sys +import sqlite3 +from display_adapter import db_name + +help_message = """ +This initialises an sqlite3 db for the purposes of the DisplayAdapter programs. + +Arguments: init_db.py database_name +""" + +runs_table = """ +CREATE TABLE runs ( + id INTEGER NOT NULL, + input_pattern VARCHAR, + time_slot DATETIME, + user_name VARCHAR(50), + PRIMARY KEY (id) +) +""" + +screensavers_table = """ +CREATE TABLE screensavers ( + pattern VARCHAR +) +""" + + +def init_db(db_name=db_name): + """ + This function takes a database name and creates the database required + for the DisplayAdapter programs + """ + con = sqlite3.connect(db_name) + cur = con.cursor() + + cur.execute(runs_table) + cur.execute(screensavers_table) + + con.commit() + con.close() + + +if __name__ == "__main__": + if len(sys.argv) < 2: + if sys.argv[1].lower() == "help": + print(help_message) + else: + init_db(sys.argv[1]) + else: + init_db()
--- a/DisplayAdapter/display_adapter/scripts/init_db.py +++ b/DisplayAdapter/display_adapter/scripts/init_db.py @@ ... @@ -__author__ = 'richard' +""" +Script that is run from the command line in order to +""" +import sys +import sqlite3 +from display_adapter import db_name + +help_message = """ +This initialises an sqlite3 db for the purposes of the DisplayAdapter programs. + +Arguments: init_db.py database_name +""" + +runs_table = """ +CREATE TABLE runs ( + id INTEGER NOT NULL, + input_pattern VARCHAR, + time_slot DATETIME, + user_name VARCHAR(50), + PRIMARY KEY (id) +) +""" + +screensavers_table = """ +CREATE TABLE screensavers ( + pattern VARCHAR +) +""" + + +def init_db(db_name=db_name): + """ + This function takes a database name and creates the database required + for the DisplayAdapter programs + """ + con = sqlite3.connect(db_name) + cur = con.cursor() + + cur.execute(runs_table) + cur.execute(screensavers_table) + + con.commit() + con.close() + + +if __name__ == "__main__": + if len(sys.argv) < 2: + if sys.argv[1].lower() == "help": + print(help_message) + else: + init_db(sys.argv[1]) + else: + init_db()
--- a/DisplayAdapter/display_adapter/scripts/init_db.py +++ b/DisplayAdapter/display_adapter/scripts/init_db.py @@ -1 +1,53 @@ DEL __author__ = 'richard' ADD """ ADD Script that is run from the command line in order to ADD """ ADD import sys ADD import sqlite3 ADD from display_adapter import db_name ADD ADD help_message = """ ADD This initialises an sqlite3 db for the purposes of the DisplayAdapter programs. ADD ADD Arguments: init_db.py database_name ADD """ ADD ADD runs_table = """ ADD CREATE TABLE runs ( ADD id INTEGER NOT NULL, ADD input_pattern VARCHAR, ADD time_slot DATETIME, ADD user_name VARCHAR(50), ADD PRIMARY KEY (id) ADD ) ADD """ ADD ADD screensavers_table = """ ADD CREATE TABLE screensavers ( ADD pattern VARCHAR ADD ) ADD """ ADD ADD ADD def init_db(db_name=db_name): ADD """ ADD This function takes a database name and creates the database required ADD for the DisplayAdapter programs ADD """ ADD con = sqlite3.connect(db_name) ADD cur = con.cursor() ADD ADD cur.execute(runs_table) ADD cur.execute(screensavers_table) ADD ADD con.commit() ADD con.close() ADD ADD ADD if __name__ == "__main__": ADD if len(sys.argv) < 2: ADD if sys.argv[1].lower() == "help": ADD print(help_message) ADD else: ADD init_db(sys.argv[1]) ADD else: ADD init_db()
<<<<<<< SEARCH __author__ = 'richard' ======= """ Script that is run from the command line in order to """ import sys import sqlite3 from display_adapter import db_name help_message = """ This initialises an sqlite3 db for the purposes of the DisplayAdapter programs. Arguments: init_db.py database_name """ runs_table = """ CREATE TABLE runs ( id INTEGER NOT NULL, input_pattern VARCHAR, time_slot DATETIME, user_name VARCHAR(50), PRIMARY KEY (id) ) """ screensavers_table = """ CREATE TABLE screensavers ( pattern VARCHAR ) """ def init_db(db_name=db_name): """ This function takes a database name and creates the database required for the DisplayAdapter programs """ con = sqlite3.connect(db_name) cur = con.cursor() cur.execute(runs_table) cur.execute(screensavers_table) con.commit() con.close() if __name__ == "__main__": if len(sys.argv) < 2: if sys.argv[1].lower() == "help": print(help_message) else: init_db(sys.argv[1]) else: init_db() >>>>>>> REPLACE
stuartkeith/webaudiosequencer
8696b6454857ec853bc50f6cfa0c4f1095a2d6a6
source/javascript/keyboardShortcuts.js
javascript
mit
Disable keyboard shortcut if target element is an input element.
define(function (require) { var documentKeyboardHandler = require("utilities/documentKeyboardHandler"); var keyboardShortcuts = function (eventBus) { documentKeyboardHandler(function (event, isDown, isRepeated) { // spacebar if (isDown && event.which === 32) { if (!isRepeated) eventBus.trigger("togglePlayback"); return false; } // shift if (event.which === 16) { if (!isRepeated) { eventBus.trigger("setGridViewState", { state: isDown ? "play" : "addOrRemove" }); } return false; } // number keys 1 to 9 if (isDown && event.which >= 49 && event.which <= 57) { if (!isRepeated) eventBus.trigger("selectTrack", { trackIndex: event.which - 49 }); return false; } }); }; return keyboardShortcuts; });
define(function (require) { var $ = require("jquery"); var documentKeyboardHandler = require("utilities/documentKeyboardHandler"); var keyboardShortcuts = function (eventBus) { documentKeyboardHandler(function (event, isDown, isRepeated) { // spacebar if (isDown && event.which === 32) { if (!isRepeated) eventBus.trigger("togglePlayback"); return false; } // shift if (event.which === 16) { if (!isRepeated) { eventBus.trigger("setGridViewState", { state: isDown ? "play" : "addOrRemove" }); } return false; } // number keys 1 to 9 if (isDown && event.which >= 49 && event.which <= 57) { if ($(event.target).is("input")) return; if (!isRepeated) eventBus.trigger("selectTrack", { trackIndex: event.which - 49 }); return false; } }); }; return keyboardShortcuts; });
4
0
2
add_only
--- a/source/javascript/keyboardShortcuts.js +++ b/source/javascript/keyboardShortcuts.js @@ -1,2 +1,3 @@ define(function (require) { + var $ = require("jquery"); var documentKeyboardHandler = require("utilities/documentKeyboardHandler"); @@ -26,2 +27,5 @@ if (isDown && event.which >= 49 && event.which <= 57) { + if ($(event.target).is("input")) + return; + if (!isRepeated)
--- a/source/javascript/keyboardShortcuts.js +++ b/source/javascript/keyboardShortcuts.js @@ ... @@ define(function (require) { + var $ = require("jquery"); var documentKeyboardHandler = require("utilities/documentKeyboardHandler"); @@ ... @@ if (isDown && event.which >= 49 && event.which <= 57) { + if ($(event.target).is("input")) + return; + if (!isRepeated)
--- a/source/javascript/keyboardShortcuts.js +++ b/source/javascript/keyboardShortcuts.js @@ -1,2 +1,3 @@ CON define(function (require) { ADD var $ = require("jquery"); CON var documentKeyboardHandler = require("utilities/documentKeyboardHandler"); @@ -26,2 +27,5 @@ CON if (isDown && event.which >= 49 && event.which <= 57) { ADD if ($(event.target).is("input")) ADD return; ADD CON if (!isRepeated)
<<<<<<< SEARCH define(function (require) { var documentKeyboardHandler = require("utilities/documentKeyboardHandler"); ======= define(function (require) { var $ = require("jquery"); var documentKeyboardHandler = require("utilities/documentKeyboardHandler"); >>>>>>> REPLACE <<<<<<< SEARCH // number keys 1 to 9 if (isDown && event.which >= 49 && event.which <= 57) { if (!isRepeated) eventBus.trigger("selectTrack", { ======= // number keys 1 to 9 if (isDown && event.which >= 49 && event.which <= 57) { if ($(event.target).is("input")) return; if (!isRepeated) eventBus.trigger("selectTrack", { >>>>>>> REPLACE
vimeo/vimeo-networking-java
1057fea300eed1a205ab8a09b8bf0cf6e09c9d74
models/src/main/java/com/vimeo/networking2/LiveChatConfiguration.kt
kotlin
mit
Fix app id field name in live chat configuration
package com.vimeo.networking2 import com.squareup.moshi.Json import com.squareup.moshi.JsonClass import com.vimeo.networking2.annotations.Internal /** * Live chat configuration data. */ @Internal @JsonClass(generateAdapter = true) data class LiveChatConfiguration( /** * The live chat Firebase API key. */ @Internal @Json(name = "api_key") val apiKey: String? = null, /** * The live chat Firebase app ID. */ @Internal @Json(name = "api_id") val appId: String? = null, /** * The live chat Firebase authentication domain. */ @Internal @Json(name = "auth_domain") val authDomain: String? = null, /** * The live chat Firebase database URL. */ @Internal @Json(name = "database_url") val databaseUrl: String? = null, /** * The live chat Firebase messaging sender ID. */ @Internal @Json(name = "messaging_sender_id") val messagingSenderId: String? = null, /** * The live chat Firebase project ID. */ @Internal @Json(name = "project_id") val projectId: String? = null, /** * The live chat Firebase storage bucket. */ @Internal @Json(name = "storage_bucket") val storageBucket: String? = null )
package com.vimeo.networking2 import com.squareup.moshi.Json import com.squareup.moshi.JsonClass import com.vimeo.networking2.annotations.Internal /** * Live chat configuration data. */ @Internal @JsonClass(generateAdapter = true) data class LiveChatConfiguration( /** * The live chat Firebase API key. */ @Internal @Json(name = "api_key") val apiKey: String? = null, /** * The live chat Firebase app ID. */ @Internal @Json(name = "app_id") val appId: String? = null, /** * The live chat Firebase authentication domain. */ @Internal @Json(name = "auth_domain") val authDomain: String? = null, /** * The live chat Firebase database URL. */ @Internal @Json(name = "database_url") val databaseUrl: String? = null, /** * The live chat Firebase messaging sender ID. */ @Internal @Json(name = "messaging_sender_id") val messagingSenderId: String? = null, /** * The live chat Firebase project ID. */ @Internal @Json(name = "project_id") val projectId: String? = null, /** * The live chat Firebase storage bucket. */ @Internal @Json(name = "storage_bucket") val storageBucket: String? = null )
1
1
1
mixed
--- a/models/src/main/java/com/vimeo/networking2/LiveChatConfiguration.kt +++ b/models/src/main/java/com/vimeo/networking2/LiveChatConfiguration.kt @@ -24,3 +24,3 @@ @Internal - @Json(name = "api_id") + @Json(name = "app_id") val appId: String? = null,
--- a/models/src/main/java/com/vimeo/networking2/LiveChatConfiguration.kt +++ b/models/src/main/java/com/vimeo/networking2/LiveChatConfiguration.kt @@ ... @@ @Internal - @Json(name = "api_id") + @Json(name = "app_id") val appId: String? = null,
--- a/models/src/main/java/com/vimeo/networking2/LiveChatConfiguration.kt +++ b/models/src/main/java/com/vimeo/networking2/LiveChatConfiguration.kt @@ -24,3 +24,3 @@ CON @Internal DEL @Json(name = "api_id") ADD @Json(name = "app_id") CON val appId: String? = null,
<<<<<<< SEARCH */ @Internal @Json(name = "api_id") val appId: String? = null, ======= */ @Internal @Json(name = "app_id") val appId: String? = null, >>>>>>> REPLACE
gpbl/react-day-picker
c684a626fd2a3f049ddc3a8cd91a1d6860bd351a
docs/src/code-samples/examples/input-custom-overlay.js
javascript
mit
Make sure we pass the correct props to the overlay
import React from 'react'; import PropTypes from 'prop-types'; import DayPickerInput from 'react-day-picker/DayPickerInput'; import 'react-day-picker/lib/style.css'; function CustomOverlay({ classNames, selectedDay, children }) { return ( <div className={classNames.overlayWrapper} style={{ marginLeft: -100 }}> <div className={classNames.overlay}> <h3>Hello day picker!</h3> <p> {selectedDay ? `You picked: ${selectedDay.toLocaleDateString()}` : 'Please pick a day'} </p> {children} </div> </div> ); } CustomOverlay.propTypes = { classNames: PropTypes.object.isRequired, selectedDay: PropTypes.oneOfType([Date]), children: PropTypes.number.isRequired, }; export default function Example() { return ( <DayPickerInput overlayComponent={CustomOverlay} dayPickerProps={{ todayButton: 'Today', }} /> ); }
import React from 'react'; import PropTypes from 'prop-types'; import DayPickerInput from 'react-day-picker/DayPickerInput'; import 'react-day-picker/lib/style.css'; function CustomOverlay({ classNames, selectedDay, children, ...props }) { return ( <div className={classNames.overlayWrapper} style={{ marginLeft: -100 }} {...props} > <div className={classNames.overlay}> <h3>Hello day picker!</h3> <button onClick={() => console.log('clicked!')}>button</button> <p> {selectedDay ? `You picked: ${selectedDay.toLocaleDateString()}` : 'Please pick a day'} </p> {children} </div> </div> ); } CustomOverlay.propTypes = { classNames: PropTypes.object.isRequired, selectedDay: PropTypes.instanceOf(Date), children: PropTypes.node.isRequired, }; export default function Example() { return ( <DayPickerInput overlayComponent={CustomOverlay} dayPickerProps={{ todayButton: 'Today', }} /> ); }
9
4
2
mixed
--- a/docs/src/code-samples/examples/input-custom-overlay.js +++ b/docs/src/code-samples/examples/input-custom-overlay.js @@ -6,7 +6,12 @@ -function CustomOverlay({ classNames, selectedDay, children }) { +function CustomOverlay({ classNames, selectedDay, children, ...props }) { return ( - <div className={classNames.overlayWrapper} style={{ marginLeft: -100 }}> + <div + className={classNames.overlayWrapper} + style={{ marginLeft: -100 }} + {...props} + > <div className={classNames.overlay}> <h3>Hello day picker!</h3> + <button onClick={() => console.log('clicked!')}>button</button> <p> @@ -24,4 +29,4 @@ classNames: PropTypes.object.isRequired, - selectedDay: PropTypes.oneOfType([Date]), - children: PropTypes.number.isRequired, + selectedDay: PropTypes.instanceOf(Date), + children: PropTypes.node.isRequired, };
--- a/docs/src/code-samples/examples/input-custom-overlay.js +++ b/docs/src/code-samples/examples/input-custom-overlay.js @@ ... @@ -function CustomOverlay({ classNames, selectedDay, children }) { +function CustomOverlay({ classNames, selectedDay, children, ...props }) { return ( - <div className={classNames.overlayWrapper} style={{ marginLeft: -100 }}> + <div + className={classNames.overlayWrapper} + style={{ marginLeft: -100 }} + {...props} + > <div className={classNames.overlay}> <h3>Hello day picker!</h3> + <button onClick={() => console.log('clicked!')}>button</button> <p> @@ ... @@ classNames: PropTypes.object.isRequired, - selectedDay: PropTypes.oneOfType([Date]), - children: PropTypes.number.isRequired, + selectedDay: PropTypes.instanceOf(Date), + children: PropTypes.node.isRequired, };
--- a/docs/src/code-samples/examples/input-custom-overlay.js +++ b/docs/src/code-samples/examples/input-custom-overlay.js @@ -6,7 +6,12 @@ CON DEL function CustomOverlay({ classNames, selectedDay, children }) { ADD function CustomOverlay({ classNames, selectedDay, children, ...props }) { CON return ( DEL <div className={classNames.overlayWrapper} style={{ marginLeft: -100 }}> ADD <div ADD className={classNames.overlayWrapper} ADD style={{ marginLeft: -100 }} ADD {...props} ADD > CON <div className={classNames.overlay}> CON <h3>Hello day picker!</h3> ADD <button onClick={() => console.log('clicked!')}>button</button> CON <p> @@ -24,4 +29,4 @@ CON classNames: PropTypes.object.isRequired, DEL selectedDay: PropTypes.oneOfType([Date]), DEL children: PropTypes.number.isRequired, ADD selectedDay: PropTypes.instanceOf(Date), ADD children: PropTypes.node.isRequired, CON };
<<<<<<< SEARCH import 'react-day-picker/lib/style.css'; function CustomOverlay({ classNames, selectedDay, children }) { return ( <div className={classNames.overlayWrapper} style={{ marginLeft: -100 }}> <div className={classNames.overlay}> <h3>Hello day picker!</h3> <p> {selectedDay ======= import 'react-day-picker/lib/style.css'; function CustomOverlay({ classNames, selectedDay, children, ...props }) { return ( <div className={classNames.overlayWrapper} style={{ marginLeft: -100 }} {...props} > <div className={classNames.overlay}> <h3>Hello day picker!</h3> <button onClick={() => console.log('clicked!')}>button</button> <p> {selectedDay >>>>>>> REPLACE <<<<<<< SEARCH CustomOverlay.propTypes = { classNames: PropTypes.object.isRequired, selectedDay: PropTypes.oneOfType([Date]), children: PropTypes.number.isRequired, }; ======= CustomOverlay.propTypes = { classNames: PropTypes.object.isRequired, selectedDay: PropTypes.instanceOf(Date), children: PropTypes.node.isRequired, }; >>>>>>> REPLACE
kevinschaul/nunjucks
103dd2c40f42d385bce4175ef5b6b07a822b1ee8
tests/api.js
javascript
bsd-2-clause
Add test for relative path and renderString
(function() { var expect, Environment, Loader, templatesPath; if(typeof require != 'undefined') { expect = require('expect.js'); Environment = require('../src/environment').Environment; Loader = require('../src/node-loaders').FileSystemLoader; templatesPath = 'tests/templates'; } else { expect = window.expect; Environment = nunjucks.Environment; Loader = nunjucks.WebLoader; templatesPath = '../templates'; } describe('api', function() { it('should always force compilation of parent template', function() { var env = new Environment(new Loader(templatesPath)); var parent = env.getTemplate('base.html'); var child = env.getTemplate('base-inherit.html'); expect(child.render()).to.be('Foo*Bar*BazFizzle'); }); it('should handle correctly relative paths', function() { var env = new Environment(new Loader(templatesPath)); var child1 = env.getTemplate('relative/test1.html'); var child2 = env.getTemplate('relative/test2.html'); expect(child1.render()).to.be('FooTest1BazFizzle'); expect(child2.render()).to.be('FooTest2BazFizzle'); }); }); })();
(function() { var expect, Environment, Loader, templatesPath; if(typeof require != 'undefined') { expect = require('expect.js'); Environment = require('../src/environment').Environment; Loader = require('../src/node-loaders').FileSystemLoader; templatesPath = 'tests/templates'; } else { expect = window.expect; Environment = nunjucks.Environment; Loader = nunjucks.WebLoader; templatesPath = '../templates'; } describe('api', function() { it('should always force compilation of parent template', function() { var env = new Environment(new Loader(templatesPath)); var parent = env.getTemplate('base.html'); var child = env.getTemplate('base-inherit.html'); expect(child.render()).to.be('Foo*Bar*BazFizzle'); }); it('should handle correctly relative paths', function() { var env = new Environment(new Loader(templatesPath)); var child1 = env.getTemplate('relative/test1.html'); var child2 = env.getTemplate('relative/test2.html'); expect(child1.render()).to.be('FooTest1BazFizzle'); expect(child2.render()).to.be('FooTest2BazFizzle'); }); it('should handle correctly relative paths in renderString', function() { var env = new Environment(new Loader(templatesPath)); expect(env.renderString('{% extends "./relative/test1.html" %}{% block block1 %}Test3{% endblock %}')).to.be('FooTest3BazFizzle'); }); }); })();
5
0
1
add_only
--- a/tests/api.js +++ b/tests/api.js @@ -34,2 +34,7 @@ }); + + it('should handle correctly relative paths in renderString', function() { + var env = new Environment(new Loader(templatesPath)); + expect(env.renderString('{% extends "./relative/test1.html" %}{% block block1 %}Test3{% endblock %}')).to.be('FooTest3BazFizzle'); + }); });
--- a/tests/api.js +++ b/tests/api.js @@ ... @@ }); + + it('should handle correctly relative paths in renderString', function() { + var env = new Environment(new Loader(templatesPath)); + expect(env.renderString('{% extends "./relative/test1.html" %}{% block block1 %}Test3{% endblock %}')).to.be('FooTest3BazFizzle'); + }); });
--- a/tests/api.js +++ b/tests/api.js @@ -34,2 +34,7 @@ CON }); ADD ADD it('should handle correctly relative paths in renderString', function() { ADD var env = new Environment(new Loader(templatesPath)); ADD expect(env.renderString('{% extends "./relative/test1.html" %}{% block block1 %}Test3{% endblock %}')).to.be('FooTest3BazFizzle'); ADD }); CON });
<<<<<<< SEARCH expect(child2.render()).to.be('FooTest2BazFizzle'); }); }); })(); ======= expect(child2.render()).to.be('FooTest2BazFizzle'); }); it('should handle correctly relative paths in renderString', function() { var env = new Environment(new Loader(templatesPath)); expect(env.renderString('{% extends "./relative/test1.html" %}{% block block1 %}Test3{% endblock %}')).to.be('FooTest3BazFizzle'); }); }); })(); >>>>>>> REPLACE
LouisCAD/Splitties
3a5a3e682cb96e63573d42049ebf2206d3c23b98
sample/src/main/kotlin/com/louiscad/splittiessample/extensions/View.kt
kotlin
apache-2.0
Add contracts to view visibility scope extensions
package com.louiscad.splittiessample.extensions import android.view.View import androidx.core.view.isInvisible import androidx.core.view.isVisible inline fun <R> View.visibleInScope(finallyInvisible: Boolean = false, block: () -> R) = try { isVisible = true block() } finally { visibility = if (finallyInvisible) View.INVISIBLE else View.GONE } inline fun <R> View.goneInScope(block: () -> R) = try { isVisible = false block() } finally { isVisible = true } inline fun <R> View.invisibleInScope(block: () -> R) = try { isInvisible = true block() } finally { isInvisible = false }
package com.louiscad.splittiessample.extensions import android.view.View import androidx.core.view.isInvisible import androidx.core.view.isVisible import kotlin.contracts.ExperimentalContracts import kotlin.contracts.InvocationKind import kotlin.contracts.contract @ExperimentalContracts inline fun <R> View.visibleInScope(finallyInvisible: Boolean = false, block: () -> R): R { contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } return try { isVisible = true block() } finally { visibility = if (finallyInvisible) View.INVISIBLE else View.GONE } } @ExperimentalContracts inline fun <R> View.goneInScope(block: () -> R): R { contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } return try { isVisible = false block() } finally { isVisible = true } } @ExperimentalContracts inline fun <R> View.invisibleInScope(block: () -> R): R { contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } return try { isInvisible = true block() } finally { isInvisible = false } }
30
15
1
mixed
--- a/sample/src/main/kotlin/com/louiscad/splittiessample/extensions/View.kt +++ b/sample/src/main/kotlin/com/louiscad/splittiessample/extensions/View.kt @@ -5,22 +5,37 @@ import androidx.core.view.isVisible +import kotlin.contracts.ExperimentalContracts +import kotlin.contracts.InvocationKind +import kotlin.contracts.contract -inline fun <R> View.visibleInScope(finallyInvisible: Boolean = false, block: () -> R) = try { - isVisible = true - block() -} finally { - visibility = if (finallyInvisible) View.INVISIBLE else View.GONE +@ExperimentalContracts +inline fun <R> View.visibleInScope(finallyInvisible: Boolean = false, block: () -> R): R { + contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } + return try { + isVisible = true + block() + } finally { + visibility = if (finallyInvisible) View.INVISIBLE else View.GONE + } } -inline fun <R> View.goneInScope(block: () -> R) = try { - isVisible = false - block() -} finally { - isVisible = true +@ExperimentalContracts +inline fun <R> View.goneInScope(block: () -> R): R { + contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } + return try { + isVisible = false + block() + } finally { + isVisible = true + } } -inline fun <R> View.invisibleInScope(block: () -> R) = try { - isInvisible = true - block() -} finally { - isInvisible = false +@ExperimentalContracts +inline fun <R> View.invisibleInScope(block: () -> R): R { + contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } + return try { + isInvisible = true + block() + } finally { + isInvisible = false + } }
--- a/sample/src/main/kotlin/com/louiscad/splittiessample/extensions/View.kt +++ b/sample/src/main/kotlin/com/louiscad/splittiessample/extensions/View.kt @@ ... @@ import androidx.core.view.isVisible +import kotlin.contracts.ExperimentalContracts +import kotlin.contracts.InvocationKind +import kotlin.contracts.contract -inline fun <R> View.visibleInScope(finallyInvisible: Boolean = false, block: () -> R) = try { - isVisible = true - block() -} finally { - visibility = if (finallyInvisible) View.INVISIBLE else View.GONE +@ExperimentalContracts +inline fun <R> View.visibleInScope(finallyInvisible: Boolean = false, block: () -> R): R { + contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } + return try { + isVisible = true + block() + } finally { + visibility = if (finallyInvisible) View.INVISIBLE else View.GONE + } } -inline fun <R> View.goneInScope(block: () -> R) = try { - isVisible = false - block() -} finally { - isVisible = true +@ExperimentalContracts +inline fun <R> View.goneInScope(block: () -> R): R { + contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } + return try { + isVisible = false + block() + } finally { + isVisible = true + } } -inline fun <R> View.invisibleInScope(block: () -> R) = try { - isInvisible = true - block() -} finally { - isInvisible = false +@ExperimentalContracts +inline fun <R> View.invisibleInScope(block: () -> R): R { + contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } + return try { + isInvisible = true + block() + } finally { + isInvisible = false + } }
--- a/sample/src/main/kotlin/com/louiscad/splittiessample/extensions/View.kt +++ b/sample/src/main/kotlin/com/louiscad/splittiessample/extensions/View.kt @@ -5,22 +5,37 @@ CON import androidx.core.view.isVisible ADD import kotlin.contracts.ExperimentalContracts ADD import kotlin.contracts.InvocationKind ADD import kotlin.contracts.contract CON DEL inline fun <R> View.visibleInScope(finallyInvisible: Boolean = false, block: () -> R) = try { DEL isVisible = true DEL block() DEL } finally { DEL visibility = if (finallyInvisible) View.INVISIBLE else View.GONE ADD @ExperimentalContracts ADD inline fun <R> View.visibleInScope(finallyInvisible: Boolean = false, block: () -> R): R { ADD contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } ADD return try { ADD isVisible = true ADD block() ADD } finally { ADD visibility = if (finallyInvisible) View.INVISIBLE else View.GONE ADD } CON } CON DEL inline fun <R> View.goneInScope(block: () -> R) = try { DEL isVisible = false DEL block() DEL } finally { DEL isVisible = true ADD @ExperimentalContracts ADD inline fun <R> View.goneInScope(block: () -> R): R { ADD contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } ADD return try { ADD isVisible = false ADD block() ADD } finally { ADD isVisible = true ADD } CON } CON DEL inline fun <R> View.invisibleInScope(block: () -> R) = try { DEL isInvisible = true DEL block() DEL } finally { DEL isInvisible = false ADD @ExperimentalContracts ADD inline fun <R> View.invisibleInScope(block: () -> R): R { ADD contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } ADD return try { ADD isInvisible = true ADD block() ADD } finally { ADD isInvisible = false ADD } CON }
<<<<<<< SEARCH import androidx.core.view.isInvisible import androidx.core.view.isVisible inline fun <R> View.visibleInScope(finallyInvisible: Boolean = false, block: () -> R) = try { isVisible = true block() } finally { visibility = if (finallyInvisible) View.INVISIBLE else View.GONE } inline fun <R> View.goneInScope(block: () -> R) = try { isVisible = false block() } finally { isVisible = true } inline fun <R> View.invisibleInScope(block: () -> R) = try { isInvisible = true block() } finally { isInvisible = false } ======= import androidx.core.view.isInvisible import androidx.core.view.isVisible import kotlin.contracts.ExperimentalContracts import kotlin.contracts.InvocationKind import kotlin.contracts.contract @ExperimentalContracts inline fun <R> View.visibleInScope(finallyInvisible: Boolean = false, block: () -> R): R { contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } return try { isVisible = true block() } finally { visibility = if (finallyInvisible) View.INVISIBLE else View.GONE } } @ExperimentalContracts inline fun <R> View.goneInScope(block: () -> R): R { contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } return try { isVisible = false block() } finally { isVisible = true } } @ExperimentalContracts inline fun <R> View.invisibleInScope(block: () -> R): R { contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) } return try { isInvisible = true block() } finally { isInvisible = false } } >>>>>>> REPLACE
robinverduijn/gradle
511bc32dc756da929efab3b4904947d37c5a41e1
buildSrc/build.gradle.kts
kotlin
apache-2.0
Enable progressive kotlin-dsl on buildSrc Signed-off-by: Paul Merlin <[email protected]>
import org.gradle.kotlin.dsl.plugins.precompiled.PrecompiledScriptPlugins import org.jetbrains.kotlin.gradle.tasks.KotlinCompile buildscript { val pluginsExperiments = "gradle.plugin.org.gradle.kotlin:gradle-kotlin-dsl-plugins-experiments:0.1.10" dependencies { classpath(pluginsExperiments) } project.dependencies { "compile"(pluginsExperiments) } configure(listOf(repositories, project.repositories)) { gradlePluginPortal() } } plugins { `java-gradle-plugin` `kotlin-dsl` } apply(plugin = "org.gradle.kotlin.ktlint-convention") apply<PrecompiledScriptPlugins>() tasks.withType<KotlinCompile> { kotlinOptions { freeCompilerArgs += listOf( "-Xjsr305=strict", "-Xskip-runtime-version-check", "-Xskip-metadata-version-check" ) } } dependencies { compileOnly(gradleKotlinDsl()) compile(kotlin("gradle-plugin")) compile(kotlin("stdlib-jdk8")) compile(kotlin("reflect")) compile("com.gradle.publish:plugin-publish-plugin:0.9.10") compile("org.ow2.asm:asm-all:5.1") testCompile("junit:junit:4.12") testCompile(gradleTestKit()) }
import org.gradle.kotlin.dsl.plugins.dsl.ProgressiveModeState import org.gradle.kotlin.dsl.plugins.precompiled.PrecompiledScriptPlugins import org.jetbrains.kotlin.gradle.tasks.KotlinCompile buildscript { val pluginsExperiments = "gradle.plugin.org.gradle.kotlin:gradle-kotlin-dsl-plugins-experiments:0.1.10" dependencies { classpath(pluginsExperiments) } project.dependencies { "compile"(pluginsExperiments) } configure(listOf(repositories, project.repositories)) { gradlePluginPortal() } } plugins { `java-gradle-plugin` `kotlin-dsl` } kotlinDslPluginOptions { progressive.set(ProgressiveModeState.ENABLED) } apply(plugin = "org.gradle.kotlin.ktlint-convention") apply<PrecompiledScriptPlugins>() tasks.withType<KotlinCompile> { kotlinOptions { freeCompilerArgs += listOf( "-Xjsr305=strict", "-Xskip-runtime-version-check", "-Xskip-metadata-version-check" ) } } dependencies { compileOnly(gradleKotlinDsl()) compile(kotlin("gradle-plugin")) compile(kotlin("stdlib-jdk8")) compile(kotlin("reflect")) compile("com.gradle.publish:plugin-publish-plugin:0.9.10") compile("org.ow2.asm:asm-all:5.1") testCompile("junit:junit:4.12") testCompile(gradleTestKit()) }
5
0
2
add_only
--- a/buildSrc/build.gradle.kts +++ b/buildSrc/build.gradle.kts @@ -1 +1,2 @@ +import org.gradle.kotlin.dsl.plugins.dsl.ProgressiveModeState import org.gradle.kotlin.dsl.plugins.precompiled.PrecompiledScriptPlugins @@ -23,2 +24,6 @@ `kotlin-dsl` +} + +kotlinDslPluginOptions { + progressive.set(ProgressiveModeState.ENABLED) }
--- a/buildSrc/build.gradle.kts +++ b/buildSrc/build.gradle.kts @@ ... @@ +import org.gradle.kotlin.dsl.plugins.dsl.ProgressiveModeState import org.gradle.kotlin.dsl.plugins.precompiled.PrecompiledScriptPlugins @@ ... @@ `kotlin-dsl` +} + +kotlinDslPluginOptions { + progressive.set(ProgressiveModeState.ENABLED) }
--- a/buildSrc/build.gradle.kts +++ b/buildSrc/build.gradle.kts @@ -1 +1,2 @@ ADD import org.gradle.kotlin.dsl.plugins.dsl.ProgressiveModeState CON import org.gradle.kotlin.dsl.plugins.precompiled.PrecompiledScriptPlugins @@ -23,2 +24,6 @@ CON `kotlin-dsl` ADD } ADD ADD kotlinDslPluginOptions { ADD progressive.set(ProgressiveModeState.ENABLED) CON }
<<<<<<< SEARCH import org.gradle.kotlin.dsl.plugins.precompiled.PrecompiledScriptPlugins import org.jetbrains.kotlin.gradle.tasks.KotlinCompile ======= import org.gradle.kotlin.dsl.plugins.dsl.ProgressiveModeState import org.gradle.kotlin.dsl.plugins.precompiled.PrecompiledScriptPlugins import org.jetbrains.kotlin.gradle.tasks.KotlinCompile >>>>>>> REPLACE <<<<<<< SEARCH `java-gradle-plugin` `kotlin-dsl` } ======= `java-gradle-plugin` `kotlin-dsl` } kotlinDslPluginOptions { progressive.set(ProgressiveModeState.ENABLED) } >>>>>>> REPLACE
ramonrabello/Kiphy
144f3d488c2becd5e469938face9bafa2ccb4e6b
app/src/main/kotlin/com/github/ramonrabello/kiphy/trends/TrendingPresenter.kt
kotlin
apache-2.0
Update class after refactoring presenter contract.
package com.github.ramonrabello.kiphy.trends import com.github.ramonrabello.kiphy.BuildConfig import com.github.ramonrabello.kiphy.data.GiphyApi import com.github.ramonrabello.kiphy.data.model.Trending import retrofit2.Call import retrofit2.Callback import retrofit2.Response /** * Presenter implementation for TrendingContract.Presenter. */ class TrendingPresenter(private val view:TrendingContract.View): TrendingContract.Presenter { override fun loadTrending() { view.showProgress() if (BuildConfig.GIPHY_API_KEY.isEmpty() || BuildConfig.GIPHY_API_KEY == "PASTE_YOUR_API_KEY_HERE"){ view.hideProgress() view.showApikeyError() } GiphyApi.trending().load().enqueue(object: Callback<Trending> { override fun onResponse(call: Call<Trending>, response: Response<Trending>) { if (response.isSuccessful){ view.hideProgress() response.body()?.let { view.showTrending(it) } } else { view.showTrendingError() } } override fun onFailure(call: Call<Trending>, t: Throwable) { view.showTrendingError() } }) } override fun onTrendingClick(view: TrendingContract.View) { TODO("not implemented") //To change body of created functions use File | Settings | File Templates. } }
package com.github.ramonrabello.kiphy.trends import com.github.ramonrabello.kiphy.BuildConfig import com.github.ramonrabello.kiphy.data.GiphyApi import com.github.ramonrabello.kiphy.trends.model.TrendingResponse import retrofit2.Call import retrofit2.Callback import retrofit2.Response /** * Presenter implementation for TrendingContract.Presenter. */ class TrendingPresenter(private val view:TrendingContract.View): TrendingContract.Presenter { override fun loadTrends() { view.showProgress() if (BuildConfig.GIPHY_API_KEY.isEmpty()){ view.hideProgress() view.showApiKeyNotSetDialog() } GiphyApi.trending().load().enqueue(object: Callback<TrendingResponse> { override fun onResponse(call: Call<TrendingResponse>, response: Response<TrendingResponse>) { if (response.isSuccessful){ view.hideProgress() response.body()?.let { body -> view.showTrending(body) } } else { view.showTrendingError() } } override fun onFailure(call: Call<TrendingResponse>, t: Throwable) { view.showTrendingError() } }) } }
8
13
4
mixed
--- a/app/src/main/kotlin/com/github/ramonrabello/kiphy/trends/TrendingPresenter.kt +++ b/app/src/main/kotlin/com/github/ramonrabello/kiphy/trends/TrendingPresenter.kt @@ -4,3 +4,3 @@ import com.github.ramonrabello.kiphy.data.GiphyApi -import com.github.ramonrabello.kiphy.data.model.Trending +import com.github.ramonrabello.kiphy.trends.model.TrendingResponse import retrofit2.Call @@ -14,17 +14,16 @@ - override fun loadTrending() { + override fun loadTrends() { view.showProgress() - if (BuildConfig.GIPHY_API_KEY.isEmpty() || - BuildConfig.GIPHY_API_KEY == "PASTE_YOUR_API_KEY_HERE"){ + if (BuildConfig.GIPHY_API_KEY.isEmpty()){ view.hideProgress() - view.showApikeyError() + view.showApiKeyNotSetDialog() } - GiphyApi.trending().load().enqueue(object: Callback<Trending> { + GiphyApi.trending().load().enqueue(object: Callback<TrendingResponse> { - override fun onResponse(call: Call<Trending>, response: Response<Trending>) { + override fun onResponse(call: Call<TrendingResponse>, response: Response<TrendingResponse>) { if (response.isSuccessful){ view.hideProgress() - response.body()?.let { view.showTrending(it) } + response.body()?.let { body -> view.showTrending(body) } } else { @@ -34,3 +33,3 @@ - override fun onFailure(call: Call<Trending>, t: Throwable) { + override fun onFailure(call: Call<TrendingResponse>, t: Throwable) { view.showTrendingError() @@ -39,6 +38,2 @@ } - - override fun onTrendingClick(view: TrendingContract.View) { - TODO("not implemented") //To change body of created functions use File | Settings | File Templates. - } }
--- a/app/src/main/kotlin/com/github/ramonrabello/kiphy/trends/TrendingPresenter.kt +++ b/app/src/main/kotlin/com/github/ramonrabello/kiphy/trends/TrendingPresenter.kt @@ ... @@ import com.github.ramonrabello.kiphy.data.GiphyApi -import com.github.ramonrabello.kiphy.data.model.Trending +import com.github.ramonrabello.kiphy.trends.model.TrendingResponse import retrofit2.Call @@ ... @@ - override fun loadTrending() { + override fun loadTrends() { view.showProgress() - if (BuildConfig.GIPHY_API_KEY.isEmpty() || - BuildConfig.GIPHY_API_KEY == "PASTE_YOUR_API_KEY_HERE"){ + if (BuildConfig.GIPHY_API_KEY.isEmpty()){ view.hideProgress() - view.showApikeyError() + view.showApiKeyNotSetDialog() } - GiphyApi.trending().load().enqueue(object: Callback<Trending> { + GiphyApi.trending().load().enqueue(object: Callback<TrendingResponse> { - override fun onResponse(call: Call<Trending>, response: Response<Trending>) { + override fun onResponse(call: Call<TrendingResponse>, response: Response<TrendingResponse>) { if (response.isSuccessful){ view.hideProgress() - response.body()?.let { view.showTrending(it) } + response.body()?.let { body -> view.showTrending(body) } } else { @@ ... @@ - override fun onFailure(call: Call<Trending>, t: Throwable) { + override fun onFailure(call: Call<TrendingResponse>, t: Throwable) { view.showTrendingError() @@ ... @@ } - - override fun onTrendingClick(view: TrendingContract.View) { - TODO("not implemented") //To change body of created functions use File | Settings | File Templates. - } }
--- a/app/src/main/kotlin/com/github/ramonrabello/kiphy/trends/TrendingPresenter.kt +++ b/app/src/main/kotlin/com/github/ramonrabello/kiphy/trends/TrendingPresenter.kt @@ -4,3 +4,3 @@ CON import com.github.ramonrabello.kiphy.data.GiphyApi DEL import com.github.ramonrabello.kiphy.data.model.Trending ADD import com.github.ramonrabello.kiphy.trends.model.TrendingResponse CON import retrofit2.Call @@ -14,17 +14,16 @@ CON DEL override fun loadTrending() { ADD override fun loadTrends() { CON view.showProgress() CON DEL if (BuildConfig.GIPHY_API_KEY.isEmpty() || DEL BuildConfig.GIPHY_API_KEY == "PASTE_YOUR_API_KEY_HERE"){ ADD if (BuildConfig.GIPHY_API_KEY.isEmpty()){ CON view.hideProgress() DEL view.showApikeyError() ADD view.showApiKeyNotSetDialog() CON } CON DEL GiphyApi.trending().load().enqueue(object: Callback<Trending> { ADD GiphyApi.trending().load().enqueue(object: Callback<TrendingResponse> { CON DEL override fun onResponse(call: Call<Trending>, response: Response<Trending>) { ADD override fun onResponse(call: Call<TrendingResponse>, response: Response<TrendingResponse>) { CON if (response.isSuccessful){ CON view.hideProgress() DEL response.body()?.let { view.showTrending(it) } ADD response.body()?.let { body -> view.showTrending(body) } CON } else { @@ -34,3 +33,3 @@ CON DEL override fun onFailure(call: Call<Trending>, t: Throwable) { ADD override fun onFailure(call: Call<TrendingResponse>, t: Throwable) { CON view.showTrendingError() @@ -39,6 +38,2 @@ CON } DEL DEL override fun onTrendingClick(view: TrendingContract.View) { DEL TODO("not implemented") //To change body of created functions use File | Settings | File Templates. DEL } CON }
<<<<<<< SEARCH import com.github.ramonrabello.kiphy.BuildConfig import com.github.ramonrabello.kiphy.data.GiphyApi import com.github.ramonrabello.kiphy.data.model.Trending import retrofit2.Call import retrofit2.Callback ======= import com.github.ramonrabello.kiphy.BuildConfig import com.github.ramonrabello.kiphy.data.GiphyApi import com.github.ramonrabello.kiphy.trends.model.TrendingResponse import retrofit2.Call import retrofit2.Callback >>>>>>> REPLACE <<<<<<< SEARCH class TrendingPresenter(private val view:TrendingContract.View): TrendingContract.Presenter { override fun loadTrending() { view.showProgress() if (BuildConfig.GIPHY_API_KEY.isEmpty() || BuildConfig.GIPHY_API_KEY == "PASTE_YOUR_API_KEY_HERE"){ view.hideProgress() view.showApikeyError() } GiphyApi.trending().load().enqueue(object: Callback<Trending> { override fun onResponse(call: Call<Trending>, response: Response<Trending>) { if (response.isSuccessful){ view.hideProgress() response.body()?.let { view.showTrending(it) } } else { view.showTrendingError() } } override fun onFailure(call: Call<Trending>, t: Throwable) { view.showTrendingError() } }) } override fun onTrendingClick(view: TrendingContract.View) { TODO("not implemented") //To change body of created functions use File | Settings | File Templates. } } ======= class TrendingPresenter(private val view:TrendingContract.View): TrendingContract.Presenter { override fun loadTrends() { view.showProgress() if (BuildConfig.GIPHY_API_KEY.isEmpty()){ view.hideProgress() view.showApiKeyNotSetDialog() } GiphyApi.trending().load().enqueue(object: Callback<TrendingResponse> { override fun onResponse(call: Call<TrendingResponse>, response: Response<TrendingResponse>) { if (response.isSuccessful){ view.hideProgress() response.body()?.let { body -> view.showTrending(body) } } else { view.showTrendingError() } } override fun onFailure(call: Call<TrendingResponse>, t: Throwable) { view.showTrendingError() } }) } } >>>>>>> REPLACE
AbhilashSrivastava/haraka_sniffer
e471f8c4da271dabebc070e77df77d888c328fc4
haraka.js
javascript
mit
Fix paths not working on node 0.6 (hack alert)
#!/usr/bin/env node "use strict"; var path = require('path'); // this must be set before "server.js" is loaded process.env.HARAKA = process.env.HARAKA || path.resolve('.'); try { require.paths.push(path.join(process.env.HARAKA, 'node_modules')); } catch(e) { process.env.NODE_PATH += ':' + path.join(process.env.HARAKA, 'node_modules'); } var fs = require('fs'); var logger = require('./logger'); var server = require('./server'); exports.version = JSON.parse( fs.readFileSync(path.join(__dirname, './package.json'), 'utf8') ).version; process.on('uncaughtException', function (err) { if (err.stack) { err.stack.split("\n").forEach(function (line) { logger.logcrit(line); }); } else { logger.logcrit('Caught exception: ' + err); } logger.dump_logs(); process.exit(1); }); logger.log("INFO", "Starting up Haraka version " + exports.version); server.createServer();
#!/usr/bin/env node "use strict"; var path = require('path'); // this must be set before "server.js" is loaded process.env.HARAKA = process.env.HARAKA || path.resolve('.'); try { require.paths.push(path.join(process.env.HARAKA, 'node_modules')); } catch(e) { process.env.NODE_PATH = process.env.NODE_PATH ? (process.env.NODE_PATH + ':' + path.join(process.env.HARAKA, 'node_modules')) : (path.join(process.env.HARAKA, 'node_modules')); require('module')._initPaths(); // Horrible hack } var fs = require('fs'); var logger = require('./logger'); var server = require('./server'); exports.version = JSON.parse( fs.readFileSync(path.join(__dirname, './package.json'), 'utf8') ).version; process.on('uncaughtException', function (err) { if (err.stack) { err.stack.split("\n").forEach(function (line) { logger.logcrit(line); }); } else { logger.logcrit('Caught exception: ' + err); } logger.dump_logs(); process.exit(1); }); logger.log("INFO", "Starting up Haraka version " + exports.version); server.createServer();
5
1
1
mixed
--- a/haraka.js +++ b/haraka.js @@ -12,3 +12,7 @@ catch(e) { - process.env.NODE_PATH += ':' + path.join(process.env.HARAKA, 'node_modules'); + process.env.NODE_PATH = process.env.NODE_PATH ? + (process.env.NODE_PATH + ':' + path.join(process.env.HARAKA, 'node_modules')) + : + (path.join(process.env.HARAKA, 'node_modules')); + require('module')._initPaths(); // Horrible hack }
--- a/haraka.js +++ b/haraka.js @@ ... @@ catch(e) { - process.env.NODE_PATH += ':' + path.join(process.env.HARAKA, 'node_modules'); + process.env.NODE_PATH = process.env.NODE_PATH ? + (process.env.NODE_PATH + ':' + path.join(process.env.HARAKA, 'node_modules')) + : + (path.join(process.env.HARAKA, 'node_modules')); + require('module')._initPaths(); // Horrible hack }
--- a/haraka.js +++ b/haraka.js @@ -12,3 +12,7 @@ CON catch(e) { DEL process.env.NODE_PATH += ':' + path.join(process.env.HARAKA, 'node_modules'); ADD process.env.NODE_PATH = process.env.NODE_PATH ? ADD (process.env.NODE_PATH + ':' + path.join(process.env.HARAKA, 'node_modules')) ADD : ADD (path.join(process.env.HARAKA, 'node_modules')); ADD require('module')._initPaths(); // Horrible hack CON }
<<<<<<< SEARCH } catch(e) { process.env.NODE_PATH += ':' + path.join(process.env.HARAKA, 'node_modules'); } ======= } catch(e) { process.env.NODE_PATH = process.env.NODE_PATH ? (process.env.NODE_PATH + ':' + path.join(process.env.HARAKA, 'node_modules')) : (path.join(process.env.HARAKA, 'node_modules')); require('module')._initPaths(); // Horrible hack } >>>>>>> REPLACE
bogdanvaduva/ol3
0377df5e0648230f1022426a1144c84cccf9400e
examples/getfeatureinfo.js
javascript
bsd-2-clause
Use evt.coordinate instead of evt.getCoordinate()
goog.require('ol.Map'); goog.require('ol.RendererHint'); goog.require('ol.View2D'); goog.require('ol.layer.Tile'); goog.require('ol.source.TileWMS'); var wmsSource = new ol.source.TileWMS({ url: 'http://demo.opengeo.org/geoserver/wms', params: {'LAYERS': 'ne:ne'} }); var wmsLayer = new ol.layer.Tile({ source: wmsSource }); var view = new ol.View2D({ center: [0, 0], zoom: 1 }); var viewProjection = /** @type {ol.proj.Projection} */ (view.getProjection()); var map = new ol.Map({ layers: [wmsLayer], renderer: ol.RendererHint.CANVAS, target: 'map', view: view }); map.on('singleclick', function(evt) { document.getElementById('info').innerHTML = ''; var viewResolution = /** @type {number} */ (view.getResolution()); var url = wmsSource.getGetFeatureInfoUrl( evt.getCoordinate(), viewResolution, viewProjection, {'INFO_FORMAT': 'text/html'}); if (url) { document.getElementById('info').innerHTML = '<iframe seamless src="' + url + '"></iframe>'; } });
goog.require('ol.Map'); goog.require('ol.RendererHint'); goog.require('ol.View2D'); goog.require('ol.layer.Tile'); goog.require('ol.source.TileWMS'); var wmsSource = new ol.source.TileWMS({ url: 'http://demo.opengeo.org/geoserver/wms', params: {'LAYERS': 'ne:ne'} }); var wmsLayer = new ol.layer.Tile({ source: wmsSource }); var view = new ol.View2D({ center: [0, 0], zoom: 1 }); var viewProjection = /** @type {ol.proj.Projection} */ (view.getProjection()); var map = new ol.Map({ layers: [wmsLayer], renderer: ol.RendererHint.CANVAS, target: 'map', view: view }); map.on('singleclick', function(evt) { document.getElementById('info').innerHTML = ''; var viewResolution = /** @type {number} */ (view.getResolution()); var url = wmsSource.getGetFeatureInfoUrl( evt.coordinate, viewResolution, viewProjection, {'INFO_FORMAT': 'text/html'}); if (url) { document.getElementById('info').innerHTML = '<iframe seamless src="' + url + '"></iframe>'; } });
1
1
1
mixed
--- a/examples/getfeatureinfo.js +++ b/examples/getfeatureinfo.js @@ -35,3 +35,3 @@ var url = wmsSource.getGetFeatureInfoUrl( - evt.getCoordinate(), viewResolution, viewProjection, + evt.coordinate, viewResolution, viewProjection, {'INFO_FORMAT': 'text/html'});
--- a/examples/getfeatureinfo.js +++ b/examples/getfeatureinfo.js @@ ... @@ var url = wmsSource.getGetFeatureInfoUrl( - evt.getCoordinate(), viewResolution, viewProjection, + evt.coordinate, viewResolution, viewProjection, {'INFO_FORMAT': 'text/html'});
--- a/examples/getfeatureinfo.js +++ b/examples/getfeatureinfo.js @@ -35,3 +35,3 @@ CON var url = wmsSource.getGetFeatureInfoUrl( DEL evt.getCoordinate(), viewResolution, viewProjection, ADD evt.coordinate, viewResolution, viewProjection, CON {'INFO_FORMAT': 'text/html'});
<<<<<<< SEARCH var viewResolution = /** @type {number} */ (view.getResolution()); var url = wmsSource.getGetFeatureInfoUrl( evt.getCoordinate(), viewResolution, viewProjection, {'INFO_FORMAT': 'text/html'}); if (url) { ======= var viewResolution = /** @type {number} */ (view.getResolution()); var url = wmsSource.getGetFeatureInfoUrl( evt.coordinate, viewResolution, viewProjection, {'INFO_FORMAT': 'text/html'}); if (url) { >>>>>>> REPLACE
aptos-labs/aptos-core
bca1dfc7088f85a8dfdeb87243b3fe221f7b6577
libra_node/src/main.rs
rust
apache-2.0
[LibraNode] Fix ordering argument for term Use acquire/release instead of relaxed.
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 use executable_helpers::helpers::{ setup_executable, ARG_CONFIG_PATH, ARG_DISABLE_LOGGING, ARG_PEER_ID, }; use signal_hook; use std::sync::{ atomic::{AtomicBool, Ordering}, Arc, }; fn register_signals(term: Arc<AtomicBool>) { for signal in &[ signal_hook::SIGTERM, signal_hook::SIGINT, signal_hook::SIGHUP, ] { let term_clone = Arc::clone(&term); let thread = std::thread::current(); unsafe { signal_hook::register(*signal, move || { term_clone.store(true, Ordering::Relaxed); thread.unpark(); }) .expect("failed to register signal handler"); } } } fn main() { let (config, _logger, _args) = setup_executable( "Libra single node".to_string(), vec![ARG_PEER_ID, ARG_CONFIG_PATH, ARG_DISABLE_LOGGING], ); let (_ac_handle, _node_handle) = libra_node::main_node::setup_environment(&config); let term = Arc::new(AtomicBool::new(false)); register_signals(Arc::clone(&term)); while !term.load(Ordering::Relaxed) { std::thread::park(); } }
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 use executable_helpers::helpers::{ setup_executable, ARG_CONFIG_PATH, ARG_DISABLE_LOGGING, ARG_PEER_ID, }; use signal_hook; use std::sync::{ atomic::{AtomicBool, Ordering}, Arc, }; fn register_signals(term: Arc<AtomicBool>) { for signal in &[ signal_hook::SIGTERM, signal_hook::SIGINT, signal_hook::SIGHUP, ] { let term_clone = Arc::clone(&term); let thread = std::thread::current(); unsafe { signal_hook::register(*signal, move || { term_clone.store(true, Ordering::Release); thread.unpark(); }) .expect("failed to register signal handler"); } } } fn main() { let (config, _logger, _args) = setup_executable( "Libra single node".to_string(), vec![ARG_PEER_ID, ARG_CONFIG_PATH, ARG_DISABLE_LOGGING], ); let (_ac_handle, _node_handle) = libra_node::main_node::setup_environment(&config); let term = Arc::new(AtomicBool::new(false)); register_signals(Arc::clone(&term)); while !term.load(Ordering::Acquire) { std::thread::park(); } }
2
2
2
mixed
--- a/libra_node/src/main.rs +++ b/libra_node/src/main.rs @@ -22,3 +22,3 @@ signal_hook::register(*signal, move || { - term_clone.store(true, Ordering::Relaxed); + term_clone.store(true, Ordering::Release); thread.unpark(); @@ -40,3 +40,3 @@ - while !term.load(Ordering::Relaxed) { + while !term.load(Ordering::Acquire) { std::thread::park();
--- a/libra_node/src/main.rs +++ b/libra_node/src/main.rs @@ ... @@ signal_hook::register(*signal, move || { - term_clone.store(true, Ordering::Relaxed); + term_clone.store(true, Ordering::Release); thread.unpark(); @@ ... @@ - while !term.load(Ordering::Relaxed) { + while !term.load(Ordering::Acquire) { std::thread::park();
--- a/libra_node/src/main.rs +++ b/libra_node/src/main.rs @@ -22,3 +22,3 @@ CON signal_hook::register(*signal, move || { DEL term_clone.store(true, Ordering::Relaxed); ADD term_clone.store(true, Ordering::Release); CON thread.unpark(); @@ -40,3 +40,3 @@ CON DEL while !term.load(Ordering::Relaxed) { ADD while !term.load(Ordering::Acquire) { CON std::thread::park();
<<<<<<< SEARCH unsafe { signal_hook::register(*signal, move || { term_clone.store(true, Ordering::Relaxed); thread.unpark(); }) ======= unsafe { signal_hook::register(*signal, move || { term_clone.store(true, Ordering::Release); thread.unpark(); }) >>>>>>> REPLACE <<<<<<< SEARCH register_signals(Arc::clone(&term)); while !term.load(Ordering::Relaxed) { std::thread::park(); } ======= register_signals(Arc::clone(&term)); while !term.load(Ordering::Acquire) { std::thread::park(); } >>>>>>> REPLACE
anton-okolelov/intellij-rust
b32f2f735f767dbf6751c017aed460b7fa07939d
src/test/kotlin/org/rust/ide/annotator/RsTraitMethodImplLineMarkerProviderTest.kt
kotlin
mit
T: Extend line marker related tests for "const" and "type"
/*
 * Use of this source code is governed by the MIT license that can be
 * found in the LICENSE file.
 */

package org.rust.ide.annotator

import org.rust.ide.lineMarkers.RsLineMarkerProviderTestBase

/**
 * Tests for Trait Method Implementation Line Marker
 */
class RsTraitMethodImplLineMarkerProviderTest : RsLineMarkerProviderTestBase() {

    fun `test impl`() = doTestByText("""
        trait Foo { // - Has implementations
            fn foo(&self);
            fn bar(&self) {
                self.foo();
            }
        }
        struct Bar {} // - Has implementations
        impl Foo for Bar {
            fn foo(&self) { // - Implements method in `Foo`
            }
            fn bar(&self) { // - Overrides method in `Foo`
            }
        }
    """)

    fun `test icon position`() = doTestByText("""
        trait Foo // - Has implementations
        {
            fn foo
                (&self);
            type T1
                ;
            const C1
                : u32;
        }
        struct Bar // - Has implementations
        {}
        impl Foo for Bar {
            ///
            /// Documentation
            ///
            #[warn(non_camel_case_types)]
            fn foo // - Implements method in `Foo`
                (&self) {
            }
            type T1
                = ();
            const C1
                : u32 = 1;
        }
    """)
}
/*
 * Use of this source code is governed by the MIT license that can be
 * found in the LICENSE file.
 */

package org.rust.ide.annotator

import org.rust.ide.lineMarkers.RsLineMarkerProviderTestBase

/**
 * Tests for Trait Method Implementation Line Marker
 */
class RsTraitMethodImplLineMarkerProviderTest : RsLineMarkerProviderTestBase() {

    fun `test impl`() = doTestByText("""
        trait Foo { // - Has implementations
            fn foo(&self);
            fn bar(&self) {
                self.foo();
            }
            type T1;
            type T2 = ();
            const C1: u32;
            const C2: u32 = 1;
        }
        struct Bar {} // - Has implementations
        impl Foo for Bar {
            fn foo(&self) { // - Implements method in `Foo`
            }
            fn bar(&self) { // - Overrides method in `Foo`
            }
            type T1 = ();
            type T2 = ();
            const C1: u32 = 1;
            const C2: u32 = 1;
        }
    """)

    fun `test icon position`() = doTestByText("""
        trait Foo // - Has implementations
        {
            fn foo
                (&self);
            type T1
                ;
            const C1
                : u32;
        }
        struct Bar // - Has implementations
        {}
        impl Foo for Bar {
            ///
            /// Documentation
            ///
            #[warn(non_camel_case_types)]
            fn foo // - Implements method in `Foo`
                (&self) {
            }
            type T1
                = ();
            const C1
                : u32 = 1;
        }
    """)
}
8
0
2
add_only
--- a/src/test/kotlin/org/rust/ide/annotator/RsTraitMethodImplLineMarkerProviderTest.kt +++ b/src/test/kotlin/org/rust/ide/annotator/RsTraitMethodImplLineMarkerProviderTest.kt @@ -20,2 +20,6 @@ } + type T1; + type T2 = (); + const C1: u32; + const C2: u32 = 1; } @@ -27,2 +31,6 @@ } + type T1 = (); + type T2 = (); + const C1: u32 = 1; + const C2: u32 = 1; }
--- a/src/test/kotlin/org/rust/ide/annotator/RsTraitMethodImplLineMarkerProviderTest.kt +++ b/src/test/kotlin/org/rust/ide/annotator/RsTraitMethodImplLineMarkerProviderTest.kt @@ ... @@ } + type T1; + type T2 = (); + const C1: u32; + const C2: u32 = 1; } @@ ... @@ } + type T1 = (); + type T2 = (); + const C1: u32 = 1; + const C2: u32 = 1; }
--- a/src/test/kotlin/org/rust/ide/annotator/RsTraitMethodImplLineMarkerProviderTest.kt +++ b/src/test/kotlin/org/rust/ide/annotator/RsTraitMethodImplLineMarkerProviderTest.kt @@ -20,2 +20,6 @@ CON } ADD type T1; ADD type T2 = (); ADD const C1: u32; ADD const C2: u32 = 1; CON } @@ -27,2 +31,6 @@ CON } ADD type T1 = (); ADD type T2 = (); ADD const C1: u32 = 1; ADD const C2: u32 = 1; CON }
<<<<<<< SEARCH self.foo(); } } struct Bar {} // - Has implementations ======= self.foo(); } type T1; type T2 = (); const C1: u32; const C2: u32 = 1; } struct Bar {} // - Has implementations >>>>>>> REPLACE <<<<<<< SEARCH fn bar(&self) { // - Overrides method in `Foo` } } """) ======= fn bar(&self) { // - Overrides method in `Foo` } type T1 = (); type T2 = (); const C1: u32 = 1; const C2: u32 = 1; } """) >>>>>>> REPLACE
smartlogic/smartchat-android
ca778ee95c0e3c52065feda21e1e1a4e8c284cd1
SmartChat/src/main/java/io/smartlogic/smartchat/GcmIntentService.java
java
mit
Use the media id as notification id
package io.smartlogic.smartchat;

import android.app.IntentService;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.NotificationCompat;
import android.support.v4.app.TaskStackBuilder;

import io.smartlogic.smartchat.activities.DisplaySmartChatActivity;
import io.smartlogic.smartchat.activities.MainActivity;

public class GcmIntentService extends IntentService {
    public GcmIntentService() {
        super("GcmIntentService");
    }

    @Override
    protected void onHandleIntent(Intent intent) {
        Bundle extras = intent.getExtras();

        NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(this)
                .setAutoCancel(true)
                .setSmallIcon(R.drawable.ic_launcher)
                .setContentTitle("New SmartChat")
                .setContentText("SmartChat from " + extras.getString("creator_email"));

        Intent resultIntent = new Intent(this, DisplaySmartChatActivity.class);
        resultIntent.putExtras(extras);

        TaskStackBuilder stackBuilder = TaskStackBuilder.create(this);
        stackBuilder.addParentStack(MainActivity.class);
        stackBuilder.addNextIntent(resultIntent);

        PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(0, PendingIntent.FLAG_UPDATE_CURRENT);
        mBuilder.setContentIntent(resultPendingIntent);

        NotificationManager mNotificationManager = (NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE);
        mNotificationManager.notify(1, mBuilder.build());
    }
}
package io.smartlogic.smartchat;

import android.app.IntentService;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.NotificationCompat;
import android.support.v4.app.TaskStackBuilder;

import io.smartlogic.smartchat.activities.DisplaySmartChatActivity;
import io.smartlogic.smartchat.activities.MainActivity;

public class GcmIntentService extends IntentService {
    public GcmIntentService() {
        super("GcmIntentService");
    }

    @Override
    protected void onHandleIntent(Intent intent) {
        Bundle extras = intent.getExtras();

        NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(this)
                .setAutoCancel(true)
                .setSmallIcon(R.drawable.ic_launcher)
                .setContentTitle("New SmartChat")
                .setContentText("SmartChat from " + extras.getString("creator_email"));

        Intent resultIntent = new Intent(this, DisplaySmartChatActivity.class);
        resultIntent.putExtras(extras);

        TaskStackBuilder stackBuilder = TaskStackBuilder.create(this);
        stackBuilder.addParentStack(MainActivity.class);
        stackBuilder.addNextIntent(resultIntent);

        PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(0, PendingIntent.FLAG_UPDATE_CURRENT);
        mBuilder.setContentIntent(resultPendingIntent);

        NotificationManager mNotificationManager = (NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE);
        mNotificationManager.notify(extras.getInt("id", 1), mBuilder.build());
    }
}
1
1
1
mixed
--- a/SmartChat/src/main/java/io/smartlogic/smartchat/GcmIntentService.java +++ b/SmartChat/src/main/java/io/smartlogic/smartchat/GcmIntentService.java @@ -38,3 +38,3 @@ NotificationManager mNotificationManager = (NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE); - mNotificationManager.notify(1, mBuilder.build()); + mNotificationManager.notify(extras.getInt("id", 1), mBuilder.build()); }
--- a/SmartChat/src/main/java/io/smartlogic/smartchat/GcmIntentService.java +++ b/SmartChat/src/main/java/io/smartlogic/smartchat/GcmIntentService.java @@ ... @@ NotificationManager mNotificationManager = (NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE); - mNotificationManager.notify(1, mBuilder.build()); + mNotificationManager.notify(extras.getInt("id", 1), mBuilder.build()); }
--- a/SmartChat/src/main/java/io/smartlogic/smartchat/GcmIntentService.java +++ b/SmartChat/src/main/java/io/smartlogic/smartchat/GcmIntentService.java @@ -38,3 +38,3 @@ CON NotificationManager mNotificationManager = (NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE); DEL mNotificationManager.notify(1, mBuilder.build()); ADD mNotificationManager.notify(extras.getInt("id", 1), mBuilder.build()); CON }
<<<<<<< SEARCH NotificationManager mNotificationManager = (NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE); mNotificationManager.notify(1, mBuilder.build()); } } ======= NotificationManager mNotificationManager = (NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE); mNotificationManager.notify(extras.getInt("id", 1), mBuilder.build()); } } >>>>>>> REPLACE
intellij-purescript/intellij-purescript
cb48d093ac7d4cd9e3edea605210656679934272
src/main/java/org/purescript/psi/ValueReference.kt
kotlin
bsd-3-clause
Change return type for variants

Co-authored-by: Simon Olander Sahlén <[email protected]>
package org.purescript.psi

import com.intellij.openapi.util.TextRange
import com.intellij.psi.*
import com.intellij.psi.PsiElementResolveResult.createResults

class ValueReference(element: PSVar) : PsiReferenceBase.Poly<PSVar>(
    element,
    TextRange.allOf(element.text.trim()),
    false
) {

    override fun getVariants(): Array<PSValueDeclaration> {
        val currentModule = myElement.module
        return (
            currentModule.valueDeclarations + currentModule.importedValueDeclarations
            ).toList().toTypedArray()
    }

    override fun multiResolve(incompleteCode: Boolean): Array<ResolveResult> {
        val name = myElement.text.trim()
        val module = myElement.module
        val importedModules = module
            .importDeclarations
            .asSequence()
            .filter { it.isNotHidingName(name) }
            .map { ModuleReference(it).resolve() }
            .filterNotNull()
        val localDeclarations = module
            .valueDeclarationsByName
            .getOrDefault(name, emptyList())
            .asSequence()
        val importedDeclarations = importedModules
            .map { it.exportedValueDeclarationsByName[name] }
            .filterNotNull()
            .flatMap { it.asSequence() }
        val declarations = (importedDeclarations + localDeclarations).filterNotNull().toList()
        return createResults(declarations)
    }
}
package org.purescript.psi

import com.intellij.openapi.util.TextRange
import com.intellij.psi.PsiElementResolveResult.createResults
import com.intellij.psi.PsiNamedElement
import com.intellij.psi.PsiReferenceBase
import com.intellij.psi.ResolveResult

class ValueReference(element: PSVar) : PsiReferenceBase.Poly<PSVar>(
    element,
    TextRange.allOf(element.text.trim()),
    false
) {

    override fun getVariants(): Array<PsiNamedElement> {
        val currentModule = myElement.module
        return (
            currentModule.valueDeclarations + currentModule.importedValueDeclarations
            ).toList().toTypedArray()
    }

    override fun multiResolve(incompleteCode: Boolean): Array<ResolveResult> {
        val name = myElement.text.trim()
        val module = myElement.module
        val importedModules = module
            .importDeclarations
            .asSequence()
            .filter { it.isNotHidingName(name) }
            .map { ModuleReference(it).resolve() }
            .filterNotNull()
        val localDeclarations = module
            .valueDeclarationsByName
            .getOrDefault(name, emptyList())
            .asSequence()
        val importedDeclarations = importedModules
            .map { it.exportedValueDeclarationsByName[name] }
            .filterNotNull()
            .flatMap { it.asSequence() }
        val declarations = (importedDeclarations + localDeclarations).filterNotNull().toList()
        return createResults(declarations)
    }
}
4
2
2
mixed
--- a/src/main/java/org/purescript/psi/ValueReference.kt +++ b/src/main/java/org/purescript/psi/ValueReference.kt @@ -3,4 +3,6 @@ import com.intellij.openapi.util.TextRange -import com.intellij.psi.* import com.intellij.psi.PsiElementResolveResult.createResults +import com.intellij.psi.PsiNamedElement +import com.intellij.psi.PsiReferenceBase +import com.intellij.psi.ResolveResult @@ -12,3 +14,3 @@ - override fun getVariants(): Array<PSValueDeclaration> { + override fun getVariants(): Array<PsiNamedElement> { val currentModule = myElement.module
--- a/src/main/java/org/purescript/psi/ValueReference.kt +++ b/src/main/java/org/purescript/psi/ValueReference.kt @@ ... @@ import com.intellij.openapi.util.TextRange -import com.intellij.psi.* import com.intellij.psi.PsiElementResolveResult.createResults +import com.intellij.psi.PsiNamedElement +import com.intellij.psi.PsiReferenceBase +import com.intellij.psi.ResolveResult @@ ... @@ - override fun getVariants(): Array<PSValueDeclaration> { + override fun getVariants(): Array<PsiNamedElement> { val currentModule = myElement.module
--- a/src/main/java/org/purescript/psi/ValueReference.kt +++ b/src/main/java/org/purescript/psi/ValueReference.kt @@ -3,4 +3,6 @@ CON import com.intellij.openapi.util.TextRange DEL import com.intellij.psi.* CON import com.intellij.psi.PsiElementResolveResult.createResults ADD import com.intellij.psi.PsiNamedElement ADD import com.intellij.psi.PsiReferenceBase ADD import com.intellij.psi.ResolveResult CON @@ -12,3 +14,3 @@ CON DEL override fun getVariants(): Array<PSValueDeclaration> { ADD override fun getVariants(): Array<PsiNamedElement> { CON val currentModule = myElement.module
<<<<<<< SEARCH import com.intellij.openapi.util.TextRange import com.intellij.psi.* import com.intellij.psi.PsiElementResolveResult.createResults class ValueReference(element: PSVar) : PsiReferenceBase.Poly<PSVar>( ======= import com.intellij.openapi.util.TextRange import com.intellij.psi.PsiElementResolveResult.createResults import com.intellij.psi.PsiNamedElement import com.intellij.psi.PsiReferenceBase import com.intellij.psi.ResolveResult class ValueReference(element: PSVar) : PsiReferenceBase.Poly<PSVar>( >>>>>>> REPLACE <<<<<<< SEARCH ) { override fun getVariants(): Array<PSValueDeclaration> { val currentModule = myElement.module return ( ======= ) { override fun getVariants(): Array<PsiNamedElement> { val currentModule = myElement.module return ( >>>>>>> REPLACE
zummenix/mprovision
ca2171f1c94763b777791ca1a9c55a9179f96ce0
src/main.rs
rust
mit
Remove unnecessary arg name for 'count' subcommand.
extern crate mprovision;
extern crate clap;

use std::fs;

use clap::{Arg, App, AppSettings, SubCommand};

fn main() {
    let count_subcommand = SubCommand::with_name("count")
        .about("Counts provisioning profiles in a directory.")
        .arg(Arg::with_name("DIRECTORY")
            .short("d")
            .long("directory")
            .help("Directory where to count provisioning profiles.")
            .required(false)
            .takes_value(true));

    let matches = App::new("mprovision")
        .setting(AppSettings::SubcommandRequired)
        .version("0.1.0")
        .about("A tool that helps iOS developers to manage mobileprovision files.")
        .subcommand(count_subcommand)
        .get_matches();

    if let Some(matches) = matches.subcommand_matches("count") {
        handle_count_subcommand(matches)
    }
}

fn handle_count_subcommand(matches: &clap::ArgMatches) {

    fn show(result: mprovision::Result<Box<Iterator<Item=fs::DirEntry>>>) {
        match result {
            Ok(files) => println!("Found {} files.", files.count()),
            Err(err) => println!("Error: {}", err),
        }
    }

    if let Some(directory) = matches.value_of("DIRECTORY") {
        show(mprovision::files(directory));
    } else {
        match mprovision::directory() {
            Ok(directory) => show(mprovision::files(directory)),
            Err(err) => println!("Error: {}", err),
        }
    }
}
extern crate mprovision;
extern crate clap;

use std::fs;

use clap::{Arg, App, AppSettings, SubCommand};

fn main() {
    let count_subcommand = SubCommand::with_name("count")
        .about("Counts provisioning profiles in a directory.")
        .arg(Arg::with_name("DIRECTORY")
            .help("Directory where to count provisioning profiles.")
            .required(false));

    let matches = App::new("mprovision")
        .setting(AppSettings::SubcommandRequired)
        .version("0.1.0")
        .about("A tool that helps iOS developers to manage mobileprovision files.")
        .subcommand(count_subcommand)
        .get_matches();

    if let Some(matches) = matches.subcommand_matches("count") {
        handle_count_subcommand(matches)
    }
}

fn handle_count_subcommand(matches: &clap::ArgMatches) {

    fn show(result: mprovision::Result<Box<Iterator<Item=fs::DirEntry>>>) {
        match result {
            Ok(files) => println!("Found {} files.", files.count()),
            Err(err) => println!("Error: {}", err),
        }
    }

    if let Some(directory) = matches.value_of("DIRECTORY") {
        show(mprovision::files(directory));
    } else {
        match mprovision::directory() {
            Ok(directory) => show(mprovision::files(directory)),
            Err(err) => println!("Error: {}", err),
        }
    }
}
1
4
1
mixed
--- a/src/main.rs +++ b/src/main.rs @@ -11,7 +11,4 @@ .arg(Arg::with_name("DIRECTORY") - .short("d") - .long("directory") .help("Directory where to count provisioning profiles.") - .required(false) - .takes_value(true)); + .required(false));
--- a/src/main.rs +++ b/src/main.rs @@ ... @@ .arg(Arg::with_name("DIRECTORY") - .short("d") - .long("directory") .help("Directory where to count provisioning profiles.") - .required(false) - .takes_value(true)); + .required(false));
--- a/src/main.rs +++ b/src/main.rs @@ -11,7 +11,4 @@ CON .arg(Arg::with_name("DIRECTORY") DEL .short("d") DEL .long("directory") CON .help("Directory where to count provisioning profiles.") DEL .required(false) DEL .takes_value(true)); ADD .required(false)); CON
<<<<<<< SEARCH .about("Counts provisioning profiles in a directory.") .arg(Arg::with_name("DIRECTORY") .short("d") .long("directory") .help("Directory where to count provisioning profiles.") .required(false) .takes_value(true)); let matches = App::new("mprovision") ======= .about("Counts provisioning profiles in a directory.") .arg(Arg::with_name("DIRECTORY") .help("Directory where to count provisioning profiles.") .required(false)); let matches = App::new("mprovision") >>>>>>> REPLACE
ministryofjustice/cla_frontend
f36cd010e5d0450959ab21153d3ca98650a82d00
cla_frontend/assets-src/javascripts/app/js/directives/callbackStatus.js
javascript
mit
Refresh logs when callback stopped
(function() {
  'use strict';

  var mod = angular.module('cla.directives');

  mod.directive('callbackStatus', ['AppSettings', '$filter', 'flash', function (AppSettings, filter, flash) {
    return {
      restrict: 'E',
      transclude: true,
      templateUrl: 'directives/callbackStatus.html',
      scope: {
        'case': '='
      },
      link: function (scope, elm) {
        // remove if not enabled
        if (!AppSettings.callMeBackEnabled) {
          elm.remove();
          return;
        }

        var builtInDateFilter = filter('date');
        scope.time = builtInDateFilter(scope.case.getCallbackDatetime(), 'HH:mm \'on\' d MMM yy');

        scope.completeCallback = function() {
          scope.case.$complete_call_me_back().then(function() {
            scope.case.requires_action_at = null;
            scope.case.callback_attempt = 0;
            elm.remove();
            flash('Callback cancelled successfully');
          });
        };

        if (!scope.time) {
          elm.remove();
          return;
        }
      }
    };
  }]);
})();
(function() {
  'use strict';

  var mod = angular.module('cla.directives');

  mod.directive('callbackStatus', ['AppSettings', '$filter', 'flash', 'postal', function (AppSettings, filter, flash, postal) {
    return {
      restrict: 'E',
      transclude: true,
      templateUrl: 'directives/callbackStatus.html',
      scope: {
        'case': '='
      },
      link: function (scope, elm) {
        // remove if not enabled
        if (!AppSettings.callMeBackEnabled) {
          elm.remove();
          return;
        }

        var builtInDateFilter = filter('date');
        scope.time = builtInDateFilter(scope.case.getCallbackDatetime(), 'HH:mm \'on\' d MMM yy');

        scope.completeCallback = function() {
          scope.case.$complete_call_me_back().then(function() {
            scope.case.requires_action_at = null;
            scope.case.callback_attempt = 0;
            elm.remove();
            flash('Callback stopped successfully');
            // refreshing the logs
            postal.publish({
              channel : 'models',
              topic : 'Log.refresh'
            });
          });
        };

        if (!scope.time) {
          elm.remove();
          return;
        }
      }
    };
  }]);
})();
7
2
2
mixed
--- a/cla_frontend/assets-src/javascripts/app/js/directives/callbackStatus.js +++ b/cla_frontend/assets-src/javascripts/app/js/directives/callbackStatus.js @@ -5,3 +5,3 @@ - mod.directive('callbackStatus', ['AppSettings', '$filter', 'flash', function (AppSettings, filter, flash) { + mod.directive('callbackStatus', ['AppSettings', '$filter', 'flash', 'postal', function (AppSettings, filter, flash, postal) { return { @@ -30,3 +30,8 @@ elm.remove(); - flash('Callback cancelled successfully'); + flash('Callback stopped successfully'); + // refreshing the logs + postal.publish({ + channel : 'models', + topic : 'Log.refresh' + }); });
--- a/cla_frontend/assets-src/javascripts/app/js/directives/callbackStatus.js +++ b/cla_frontend/assets-src/javascripts/app/js/directives/callbackStatus.js @@ ... @@ - mod.directive('callbackStatus', ['AppSettings', '$filter', 'flash', function (AppSettings, filter, flash) { + mod.directive('callbackStatus', ['AppSettings', '$filter', 'flash', 'postal', function (AppSettings, filter, flash, postal) { return { @@ ... @@ elm.remove(); - flash('Callback cancelled successfully'); + flash('Callback stopped successfully'); + // refreshing the logs + postal.publish({ + channel : 'models', + topic : 'Log.refresh' + }); });
--- a/cla_frontend/assets-src/javascripts/app/js/directives/callbackStatus.js +++ b/cla_frontend/assets-src/javascripts/app/js/directives/callbackStatus.js @@ -5,3 +5,3 @@ CON DEL mod.directive('callbackStatus', ['AppSettings', '$filter', 'flash', function (AppSettings, filter, flash) { ADD mod.directive('callbackStatus', ['AppSettings', '$filter', 'flash', 'postal', function (AppSettings, filter, flash, postal) { CON return { @@ -30,3 +30,8 @@ CON elm.remove(); DEL flash('Callback cancelled successfully'); ADD flash('Callback stopped successfully'); ADD // refreshing the logs ADD postal.publish({ ADD channel : 'models', ADD topic : 'Log.refresh' ADD }); CON });
<<<<<<< SEARCH var mod = angular.module('cla.directives'); mod.directive('callbackStatus', ['AppSettings', '$filter', 'flash', function (AppSettings, filter, flash) { return { restrict: 'E', ======= var mod = angular.module('cla.directives'); mod.directive('callbackStatus', ['AppSettings', '$filter', 'flash', 'postal', function (AppSettings, filter, flash, postal) { return { restrict: 'E', >>>>>>> REPLACE <<<<<<< SEARCH elm.remove(); flash('Callback cancelled successfully'); }); }; ======= elm.remove(); flash('Callback stopped successfully'); // refreshing the logs postal.publish({ channel : 'models', topic : 'Log.refresh' }); }); }; >>>>>>> REPLACE
Kotlin/dokka
a6c9bfb98b1b298e125d69cc0854fcc94259ad59
src/Generation/ConsoleGenerator.kt
kotlin
apache-2.0
Sort members by name for output.
package org.jetbrains.dokka

public class ConsoleGenerator(val signatureGenerator : SignatureGenerator) {
    val IndentStep = " "

    public fun generate(node: DocumentationNode, indent: String = "") {
        generateHeader(node, indent)
        //generateDetails(node, indent)
        generateMembers(node, indent)
        generateLinks(node, indent)
    }

    public fun generateHeader(node: DocumentationNode, indent: String = "") {
        println(indent + signatureGenerator.render(node))
        val docString = node.doc.toString()
        if (!docString.isEmpty())
            println("$indent\"${docString.replace("\n", "\n$indent")}\"")
        println()
    }

    public fun generateMembers(node: DocumentationNode, indent: String = "") {
        val items = node.members
        for (child in items)
            generate(child, indent + IndentStep)
    }

    public fun generateDetails(node: DocumentationNode, indent: String = "") {
        val items = node.details
        for (child in items)
            generate(child, indent + " ")
    }

    public fun generateLinks(node: DocumentationNode, indent: String = "") {
        val items = node.links
        if (items.isEmpty())
            return
        println("$indent Links")
        for (child in items)
            generate(child, indent + " ")
    }
}
package org.jetbrains.dokka

public class ConsoleGenerator(val signatureGenerator : SignatureGenerator) {
    val IndentStep = " "

    public fun generate(node: DocumentationNode, indent: String = "") {
        generateHeader(node, indent)
        //generateDetails(node, indent)
        generateMembers(node, indent)
        generateLinks(node, indent)
    }

    public fun generateHeader(node: DocumentationNode, indent: String = "") {
        println(indent + signatureGenerator.render(node))
        val docString = node.doc.toString()
        if (!docString.isEmpty())
            println("$indent\"${docString.replace("\n", "\n$indent")}\"")
        println()
    }

    public fun generateMembers(node: DocumentationNode, indent: String = "") {
        val items = node.members.sortBy { it.name }
        for (child in items)
            generate(child, indent + IndentStep)
    }

    public fun generateDetails(node: DocumentationNode, indent: String = "") {
        val items = node.details
        for (child in items)
            generate(child, indent + " ")
    }

    public fun generateLinks(node: DocumentationNode, indent: String = "") {
        val items = node.links
        if (items.isEmpty())
            return
        println("$indent Links")
        for (child in items)
            generate(child, indent + " ")
    }
}
1
1
1
mixed
--- a/src/Generation/ConsoleGenerator.kt +++ b/src/Generation/ConsoleGenerator.kt @@ -21,3 +21,3 @@ public fun generateMembers(node: DocumentationNode, indent: String = "") { - val items = node.members + val items = node.members.sortBy { it.name } for (child in items)
--- a/src/Generation/ConsoleGenerator.kt +++ b/src/Generation/ConsoleGenerator.kt @@ ... @@ public fun generateMembers(node: DocumentationNode, indent: String = "") { - val items = node.members + val items = node.members.sortBy { it.name } for (child in items)
--- a/src/Generation/ConsoleGenerator.kt +++ b/src/Generation/ConsoleGenerator.kt @@ -21,3 +21,3 @@ CON public fun generateMembers(node: DocumentationNode, indent: String = "") { DEL val items = node.members ADD val items = node.members.sortBy { it.name } CON for (child in items)
<<<<<<< SEARCH public fun generateMembers(node: DocumentationNode, indent: String = "") { val items = node.members for (child in items) generate(child, indent + IndentStep) ======= public fun generateMembers(node: DocumentationNode, indent: String = "") { val items = node.members.sortBy { it.name } for (child in items) generate(child, indent + IndentStep) >>>>>>> REPLACE
taycaldwell/riot-api-java
312780ca5daba94f6ee876250d929c063f1d7c2a
src/constant/PlatformId.java
java
apache-2.0
Add PBE support to current-game and featured-games endpoints
package constant;

/*
 * Copyright 2015 Taylor Caldwell
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

public enum PlatformId {

    NA("NA1", "na"),
    BR("BR1", "br"),
    LAN("LA1", "lan"),
    LAS("LA2", "las"),
    OCE("OC1", "oce"),
    EUNE("EUN1", "eune"),
    EUW("EUW1", "euw"),
    KR("KR", "kr"),
    RU("RU", "ru"),
    TR("TR1", "tr");

    private String id;
    private String name;

    PlatformId(String id, String name) {
        this.id = id;
        this.name = name;
    }

    public String getId() {
        return id;
    }

    public String getName() {
        return name;
    }

}
package constant;

/*
 * Copyright 2015 Taylor Caldwell
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

public enum PlatformId {

    NA("NA1", "na"),
    BR("BR1", "br"),
    LAN("LA1", "lan"),
    LAS("LA2", "las"),
    OCE("OC1", "oce"),
    EUNE("EUN1", "eune"),
    EUW("EUW1", "euw"),
    KR("KR", "kr"),
    RU("RU", "ru"),
    TR("TR1", "tr"),
    PBE("PBE1", "pbe");

    private String id;
    private String name;

    PlatformId(String id, String name) {
        this.id = id;
        this.name = name;
    }

    public String getId() {
        return id;
    }

    public String getName() {
        return name;
    }

}
2
1
1
mixed
--- a/src/constant/PlatformId.java +++ b/src/constant/PlatformId.java @@ -29,3 +29,4 @@ RU("RU", "ru"), - TR("TR1", "tr"); + TR("TR1", "tr"), + PBE("PBE1", "pbe");
--- a/src/constant/PlatformId.java +++ b/src/constant/PlatformId.java @@ ... @@ RU("RU", "ru"), - TR("TR1", "tr"); + TR("TR1", "tr"), + PBE("PBE1", "pbe");
--- a/src/constant/PlatformId.java +++ b/src/constant/PlatformId.java @@ -29,3 +29,4 @@ CON RU("RU", "ru"), DEL TR("TR1", "tr"); ADD TR("TR1", "tr"), ADD PBE("PBE1", "pbe"); CON
<<<<<<< SEARCH KR("KR", "kr"), RU("RU", "ru"), TR("TR1", "tr"); private String id; ======= KR("KR", "kr"), RU("RU", "ru"), TR("TR1", "tr"), PBE("PBE1", "pbe"); private String id; >>>>>>> REPLACE
intellij-purescript/intellij-purescript
9fa3ae1dc604292453e50aec01c5c2561c0e9ad8
src/main/java/org/purescript/parser/Parsec.kt
kotlin
bsd-3-clause
Implement seq as plus and use that in then
package org.purescript.parser

import com.intellij.psi.tree.IElementType
import java.util.*

abstract class Parsec {
    var name: String? = null
        get() {
            if (field == null) {
                field = calcName()
            }
            return field
        }
        private set
    var expectedName: HashSet<String?>? = null
        get() {
            if (field == null) {
                field = calcExpectedName()
            }
            return field
        }
        private set
    private var canBeEmpty: Boolean? = null
    abstract fun parse(context: ParserContext): ParserInfo
    protected abstract fun calcName(): String
    protected abstract fun calcExpectedName(): HashSet<String?>
    fun then(next: Parsec): Parsec {
        return Combinators.seq(this, next)
    }

    fun lexeme(type: IElementType): Parsec {
        return then(Combinators.lexeme(Combinators.token(type)))
    }

    fun or(next: Parsec): Parsec {
        return Combinators.choice(this, next)
    }

    fun `as`(node: IElementType): SymbolicParsec {
        return SymbolicParsec(this, node)
    }

    abstract fun canStartWith(type: IElementType): Boolean
    fun canBeEmpty(): Boolean {
        if (canBeEmpty == null) {
            canBeEmpty = calcCanBeEmpty()
        }
        return canBeEmpty!!
    }

    protected abstract fun calcCanBeEmpty(): Boolean
}
package org.purescript.parser

import com.intellij.psi.tree.IElementType
import java.util.*

abstract class Parsec {
    var name: String? = null
        get() {
            if (field == null) {
                field = calcName()
            }
            return field
        }
        private set
    var expectedName: HashSet<String?>? = null
        get() {
            if (field == null) {
                field = calcExpectedName()
            }
            return field
        }
        private set
    private var canBeEmpty: Boolean? = null
    abstract fun parse(context: ParserContext): ParserInfo
    protected abstract fun calcName(): String
    protected abstract fun calcExpectedName(): HashSet<String?>
    operator fun plus(other: Parsec): Parsec {
        return Combinators.seq(this, other)
    }
    fun then(next: Parsec): Parsec {
        return this + next
    }

    fun lexeme(type: IElementType): Parsec {
        return then(Combinators.lexeme(Combinators.token(type)))
    }

    fun or(next: Parsec): Parsec {
        return Combinators.choice(this, next)
    }

    fun `as`(node: IElementType): SymbolicParsec {
        return SymbolicParsec(this, node)
    }

    abstract fun canStartWith(type: IElementType): Boolean
    fun canBeEmpty(): Boolean {
        if (canBeEmpty == null) {
            canBeEmpty = calcCanBeEmpty()
        }
        return canBeEmpty!!
    }

    protected abstract fun calcCanBeEmpty(): Boolean
}
4
1
1
mixed
--- a/src/main/java/org/purescript/parser/Parsec.kt +++ b/src/main/java/org/purescript/parser/Parsec.kt @@ -26,4 +26,7 @@ protected abstract fun calcExpectedName(): HashSet<String?> + operator fun plus(other: Parsec): Parsec { + return Combinators.seq(this, other) + } fun then(next: Parsec): Parsec { - return Combinators.seq(this, next) + return this + next }
--- a/src/main/java/org/purescript/parser/Parsec.kt +++ b/src/main/java/org/purescript/parser/Parsec.kt @@ ... @@ protected abstract fun calcExpectedName(): HashSet<String?> + operator fun plus(other: Parsec): Parsec { + return Combinators.seq(this, other) + } fun then(next: Parsec): Parsec { - return Combinators.seq(this, next) + return this + next }
--- a/src/main/java/org/purescript/parser/Parsec.kt +++ b/src/main/java/org/purescript/parser/Parsec.kt @@ -26,4 +26,7 @@ CON protected abstract fun calcExpectedName(): HashSet<String?> ADD operator fun plus(other: Parsec): Parsec { ADD return Combinators.seq(this, other) ADD } CON fun then(next: Parsec): Parsec { DEL return Combinators.seq(this, next) ADD return this + next CON }
<<<<<<< SEARCH protected abstract fun calcName(): String protected abstract fun calcExpectedName(): HashSet<String?> fun then(next: Parsec): Parsec { return Combinators.seq(this, next) } ======= protected abstract fun calcName(): String protected abstract fun calcExpectedName(): HashSet<String?> operator fun plus(other: Parsec): Parsec { return Combinators.seq(this, other) } fun then(next: Parsec): Parsec { return this + next } >>>>>>> REPLACE
generators-io-projects/generators
0863317d969ce1de7e90cc6c55a8615040af877f
generators-core/src/main/java/io/generators/core/RandomFromCollectionGenerator.java
java
apache-2.0
Remove insane double ternary operator

Ternary operators are fine, but putting one inside another is hard to read and confusing.
package io.generators.core;

import javax.annotation.Nonnull;
import java.util.Collection;
import java.util.List;
import java.util.Random;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.ImmutableList.copyOf;

/**
 * Generates randomly selected element from collection/array
 *
 * @param <T> type of the collection's elements
 *
 * @author Tomas Klubal
 */
public class RandomFromCollectionGenerator<T> implements Generator<T> {
    private final List<T> items;
    private final Random random = new Random();

    /**
     * Creates generator that selects values from <code>items</code> passed in
     *
     * @param items to select from
     * @throws NullPointerException when collection passed in is null
     */
    public RandomFromCollectionGenerator(@Nonnull Collection<T> items) {
        this.items = copyOf(checkNotNull(items, "Collection for generation can't be null"));
    }

    /**
     * Creates generator that selects values from <code>items</code> passed in
     *
     * @param items to select from
     * @throws NullPointerException when array passed in is null
     */
    @SafeVarargs
    public RandomFromCollectionGenerator(T... items) {
        this.items = copyOf(checkNotNull(items, "Collection for generation can't be null"));
    }

    @Override
    public T next() {
        int maximumIndex = items.size() - 1;
        return maximumIndex > 0
                ? items.get(random.nextInt(maximumIndex))
                : maximumIndex == 0 ? items.get(0) : null;
    }
}
package io.generators.core;

import javax.annotation.Nonnull;
import java.util.Collection;
import java.util.List;
import java.util.Random;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.ImmutableList.copyOf;

/**
 * Generates randomly selected element from collection/array
 *
 * @param <T> type of the collection's elements
 * @author Tomas Klubal
 */
public class RandomFromCollectionGenerator<T> implements Generator<T> {
    private final List<T> items;
    private final Random random = new Random();

    /**
     * Creates generator that selects values from <code>items</code> passed in
     *
     * @param items to select from
     * @throws NullPointerException when collection passed in is null
     */
    public RandomFromCollectionGenerator(@Nonnull Collection<T> items) {
        this.items = copyOf(checkNotNull(items, "Collection for generation can't be null"));
    }

    /**
     * Creates generator that selects values from <code>items</code> passed in
     *
     * @param items to select from
     * @throws NullPointerException when array passed in is null
     */
    @SafeVarargs
    public RandomFromCollectionGenerator(T... items) {
        this.items = copyOf(checkNotNull(items, "Collection for generation can't be null"));
    }

    @Override
    public T next() {
        int maximumIndex = items.size() - 1;
        if (maximumIndex > 0) {
            return items.get(random.nextInt(maximumIndex));
        } else if (maximumIndex == 0) {
            return items.get(0);
        } else {
            return null;
        }
    }
}
7
4
2
mixed
--- a/generators-core/src/main/java/io/generators/core/RandomFromCollectionGenerator.java +++ b/generators-core/src/main/java/io/generators/core/RandomFromCollectionGenerator.java @@ -14,3 +14,2 @@ * @param <T> type of the collection's elements - * * @author Tomas Klubal @@ -45,5 +44,9 @@ int maximumIndex = items.size() - 1; - return maximumIndex > 0 - ? items.get(random.nextInt(maximumIndex)) - : maximumIndex == 0 ? items.get(0) : null; + if (maximumIndex > 0) { + return items.get(random.nextInt(maximumIndex)); + } else if (maximumIndex == 0) { + return items.get(0); + } else { + return null; + } }
--- a/generators-core/src/main/java/io/generators/core/RandomFromCollectionGenerator.java +++ b/generators-core/src/main/java/io/generators/core/RandomFromCollectionGenerator.java @@ ... @@ * @param <T> type of the collection's elements - * * @author Tomas Klubal @@ ... @@ int maximumIndex = items.size() - 1; - return maximumIndex > 0 - ? items.get(random.nextInt(maximumIndex)) - : maximumIndex == 0 ? items.get(0) : null; + if (maximumIndex > 0) { + return items.get(random.nextInt(maximumIndex)); + } else if (maximumIndex == 0) { + return items.get(0); + } else { + return null; + } }
--- a/generators-core/src/main/java/io/generators/core/RandomFromCollectionGenerator.java +++ b/generators-core/src/main/java/io/generators/core/RandomFromCollectionGenerator.java @@ -14,3 +14,2 @@ CON * @param <T> type of the collection's elements DEL * CON * @author Tomas Klubal @@ -45,5 +44,9 @@ CON int maximumIndex = items.size() - 1; DEL return maximumIndex > 0 DEL ? items.get(random.nextInt(maximumIndex)) DEL : maximumIndex == 0 ? items.get(0) : null; ADD if (maximumIndex > 0) { ADD return items.get(random.nextInt(maximumIndex)); ADD } else if (maximumIndex == 0) { ADD return items.get(0); ADD } else { ADD return null; ADD } CON }
<<<<<<< SEARCH * * @param <T> type of the collection's elements * * @author Tomas Klubal */ ======= * * @param <T> type of the collection's elements * @author Tomas Klubal */ >>>>>>> REPLACE <<<<<<< SEARCH public T next() { int maximumIndex = items.size() - 1; return maximumIndex > 0 ? items.get(random.nextInt(maximumIndex)) : maximumIndex == 0 ? items.get(0) : null; } } ======= public T next() { int maximumIndex = items.size() - 1; if (maximumIndex > 0) { return items.get(random.nextInt(maximumIndex)); } else if (maximumIndex == 0) { return items.get(0); } else { return null; } } } >>>>>>> REPLACE
jguerinet/mobile-string-parser
1d61a220eff060b419273ebfcff4e500d3a0e146
src/main/java/config/AnalyticsConfig.kt
kotlin
apache-2.0
Add types list to the Analytics config
/*
 * Copyright 2013-2018 Julien Guerinet
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package config

import com.guerinet.weave.config.Source
import kotlinx.serialization.Optional
import kotlinx.serialization.Serializable

/**
 * Base parsed Config Json
 * @author Julien Guerinet
 * @since 5.0.0
 */
@Serializable
class AnalyticsConfig {

    /** List of [Source]s the Strings are coming from */
    val sources: List<Source> = listOf()

    /** Path to the file to write to */
    val path: String = ""

    /** Optional package name used on Android */
    @Optional
    val packageName: String? = null

    /** Name of the column that holds the type */
    val typeColumnName: String = ""

    /** Name of the column that holds the tag */
    val tagColumnName: String = ""
}
/*
 * Copyright 2013-2018 Julien Guerinet
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package config

import com.guerinet.weave.config.Source
import kotlinx.serialization.Optional
import kotlinx.serialization.Serializable

/**
 * Base parsed Config Json
 * @author Julien Guerinet
 * @since 5.0.0
 */
@Serializable
class AnalyticsConfig {

    /** List of [Source]s the Strings are coming from */
    val sources: List<Source> = listOf()

    /** Path to the file to write to */
    val path: String = ""

    /** Optional package name used on Android */
    @Optional
    val packageName: String? = null

    /** Name of the column that holds the type */
    val typeColumnName: String = ""

    /** Name of the column that holds the tag */
    val tagColumnName: String = ""

    /** Types that we should parse these into. Will typically be "Events" and "Screens" */
    val types: List<String> = listOf()
}
3
0
1
add_only
--- a/src/main/java/config/AnalyticsConfig.kt +++ b/src/main/java/config/AnalyticsConfig.kt @@ -46,2 +46,5 @@ val tagColumnName: String = "" + + /** Types that we should parse these into. Will typically be "Events" and "Screens" */ + val types: List<String> = listOf() }
--- a/src/main/java/config/AnalyticsConfig.kt +++ b/src/main/java/config/AnalyticsConfig.kt @@ ... @@ val tagColumnName: String = "" + + /** Types that we should parse these into. Will typically be "Events" and "Screens" */ + val types: List<String> = listOf() }
--- a/src/main/java/config/AnalyticsConfig.kt +++ b/src/main/java/config/AnalyticsConfig.kt @@ -46,2 +46,5 @@ CON val tagColumnName: String = "" ADD ADD /** Types that we should parse these into. Will typically be "Events" and "Screens" */ ADD val types: List<String> = listOf() CON }
<<<<<<< SEARCH /** Name of the column that holds the tag */ val tagColumnName: String = "" } ======= /** Name of the column that holds the tag */ val tagColumnName: String = "" /** Types that we should parse these into. Will typically be "Events" and "Screens" */ val types: List<String> = listOf() } >>>>>>> REPLACE
stewart/rff
5874d6bdb74c52ef375ebebf06d995e2e2cff7a5
src/fuzzy/score.rs
rust
mit
Implement PartialOrd, PartialEq for Score

This allows Scores to be compared and sorted.
#[derive(Copy, Clone, Debug)]
pub struct Score {
    /// The computed score value
    pub value: f32
}

impl Score {
    /// Creates a new Score with the provided value
    ///
    /// # Examples
    ///
    /// ```
    /// let score = rff::fuzzy::Score::new(1.0);
    /// assert_eq!(score.value, 1.0);
    /// ```
    pub fn new(value: f32) -> Score {
        Score {
            value: value
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
}
use std::cmp::Ordering;

#[derive(Copy, Clone, Debug)]
pub struct Score {
    /// The computed score value
    pub value: f32
}

impl Score {
    /// Creates a new Score with the provided value
    ///
    /// # Examples
    ///
    /// ```
    /// let score = rff::fuzzy::Score::new(1.0);
    /// assert_eq!(score.value, 1.0);
    /// ```
    pub fn new(value: f32) -> Score {
        Score {
            value: value
        }
    }
}

impl PartialOrd for Score {
    fn partial_cmp(&self, other: &Score) -> Option<Ordering> {
        self.value.partial_cmp(&other.value)
    }
}

impl PartialEq for Score {
    fn eq(&self, other: &Score) -> bool {
        self.value == other.value
    }
}


#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_eq() {
        let a = Score::new(1.0);
        let b = Score::new(1.0);
        assert_eq!(a, b);
    }

    #[test]
    fn test_cmp() {
        let a = Score::new(2.0);
        let b = Score::new(1.0);
        assert!(a > b);
        assert!(b < a);

        let b = Score::new(2.0);
        assert!(a == b);
    }
}
33
0
3
add_only
--- a/src/fuzzy/score.rs +++ b/src/fuzzy/score.rs @@ -1 +1,3 @@ +use std::cmp::Ordering; + #[derive(Copy, Clone, Debug)] @@ -22,2 +24,15 @@ +impl PartialOrd for Score { + fn partial_cmp(&self, other: &Score) -> Option<Ordering> { + self.value.partial_cmp(&other.value) + } +} + +impl PartialEq for Score { + fn eq(&self, other: &Score) -> bool { + self.value == other.value + } +} + + #[cfg(test)] @@ -25,2 +40,20 @@ use super::*; + + #[test] + fn test_eq() { + let a = Score::new(1.0); + let b = Score::new(1.0); + assert_eq!(a, b); + } + + #[test] + fn test_cmp() { + let a = Score::new(2.0); + let b = Score::new(1.0); + assert!(a > b); + assert!(b < a); + + let b = Score::new(2.0); + assert!(a == b); + } }
--- a/src/fuzzy/score.rs +++ b/src/fuzzy/score.rs @@ ... @@ +use std::cmp::Ordering; + #[derive(Copy, Clone, Debug)] @@ ... @@ +impl PartialOrd for Score { + fn partial_cmp(&self, other: &Score) -> Option<Ordering> { + self.value.partial_cmp(&other.value) + } +} + +impl PartialEq for Score { + fn eq(&self, other: &Score) -> bool { + self.value == other.value + } +} + + #[cfg(test)] @@ ... @@ use super::*; + + #[test] + fn test_eq() { + let a = Score::new(1.0); + let b = Score::new(1.0); + assert_eq!(a, b); + } + + #[test] + fn test_cmp() { + let a = Score::new(2.0); + let b = Score::new(1.0); + assert!(a > b); + assert!(b < a); + + let b = Score::new(2.0); + assert!(a == b); + } }
--- a/src/fuzzy/score.rs +++ b/src/fuzzy/score.rs @@ -1 +1,3 @@ ADD use std::cmp::Ordering; ADD CON #[derive(Copy, Clone, Debug)] @@ -22,2 +24,15 @@ CON ADD impl PartialOrd for Score { ADD fn partial_cmp(&self, other: &Score) -> Option<Ordering> { ADD self.value.partial_cmp(&other.value) ADD } ADD } ADD ADD impl PartialEq for Score { ADD fn eq(&self, other: &Score) -> bool { ADD self.value == other.value ADD } ADD } ADD ADD CON #[cfg(test)] @@ -25,2 +40,20 @@ CON use super::*; ADD ADD #[test] ADD fn test_eq() { ADD let a = Score::new(1.0); ADD let b = Score::new(1.0); ADD assert_eq!(a, b); ADD } ADD ADD #[test] ADD fn test_cmp() { ADD let a = Score::new(2.0); ADD let b = Score::new(1.0); ADD assert!(a > b); ADD assert!(b < a); ADD ADD let b = Score::new(2.0); ADD assert!(a == b); ADD } CON }
<<<<<<< SEARCH #[derive(Copy, Clone, Debug)] pub struct Score { ======= use std::cmp::Ordering; #[derive(Copy, Clone, Debug)] pub struct Score { >>>>>>> REPLACE <<<<<<< SEARCH } #[cfg(test)] mod tests { use super::*; } ======= } impl PartialOrd for Score { fn partial_cmp(&self, other: &Score) -> Option<Ordering> { self.value.partial_cmp(&other.value) } } impl PartialEq for Score { fn eq(&self, other: &Score) -> bool { self.value == other.value } } #[cfg(test)] mod tests { use super::*; #[test] fn test_eq() { let a = Score::new(1.0); let b = Score::new(1.0); assert_eq!(a, b); } #[test] fn test_cmp() { let a = Score::new(2.0); let b = Score::new(1.0); assert!(a > b); assert!(b < a); let b = Score::new(2.0); assert!(a == b); } } >>>>>>> REPLACE
brosner/django-notification
ede4689ce3f9e03db5f250617e793083333af3a5
notification/backends/email.py
python
mit
pluggable-backends: Use get_app to include django-mailer support, over a standard import and ImportError exception handling.

git-svn-id: 12265af7f62f437cb19748843ef653b20b846039@130 590c3fc9-4838-0410-bb95-17a0c9b37ca9
from django.conf import settings
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from django.utils.translation import ugettext
from django.contrib.sites.models import Site

from notification import backends
from notification.message import message_to_text

# favour django-mailer but fall back to django.core.mail
try:
    from mailer import send_mail
except ImportError:
    from django.core.mail import send_mail

class EmailBackend(backends.BaseBackend):
    def can_send(self, user, notice_type):
        if should_send(user, notice_type, "1") and user.email:
            return True
        return False

    def deliver(self, recipients, notice_type, message):
        notices_url = u"http://%s%s" % (
            unicode(Site.objects.get_current()),
            reverse("notification_notices"),
        )

        subject = render_to_string("notification/notification_subject.txt", {
            "display": ugettext(notice_type.display),
        })
        message_body = render_to_string("notification/notification_body.txt", {
            "message": message_to_text(message),
            "notices_url": notices_url,
            "contact_email": settings.CONTACT_EMAIL,
        })
        send_mail(subject, message_body, settings.DEFAULT_FROM_EMAIL, recipients)
from django.conf import settings
from django.db.models.loading import get_app
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from django.utils.translation import ugettext
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured

from notification import backends
from notification.message import message_to_text

# favour django-mailer but fall back to django.core.mail
try:
    mailer = get_app("mailer")
    from mailer import send_mail
except ImproperlyConfigured:
    from django.core.mail import send_mail

class EmailBackend(backends.BaseBackend):
    def can_send(self, user, notice_type):
        if should_send(user, notice_type, "1") and user.email:
            return True
        return False

    def deliver(self, recipients, notice_type, message):
        notices_url = u"http://%s%s" % (
            unicode(Site.objects.get_current()),
            reverse("notification_notices"),
        )

        subject = render_to_string("notification/notification_subject.txt", {
            "display": ugettext(notice_type.display),
        })
        message_body = render_to_string("notification/notification_body.txt", {
            "message": message_to_text(message),
            "notices_url": notices_url,
            "contact_email": settings.CONTACT_EMAIL,
        })
        send_mail(subject, message_body, settings.DEFAULT_FROM_EMAIL, recipients)
4
1
3
mixed
--- a/notification/backends/email.py +++ b/notification/backends/email.py @@ -2,2 +2,3 @@ from django.conf import settings +from django.db.models.loading import get_app from django.core.urlresolvers import reverse @@ -6,2 +7,3 @@ from django.contrib.sites.models import Site +from django.core.exceptions import ImproperlyConfigured @@ -12,4 +14,5 @@ try: + mailer = get_app("mailer") from mailer import send_mail -except ImportError: +except ImproperlyConfigured: from django.core.mail import send_mail
--- a/notification/backends/email.py +++ b/notification/backends/email.py @@ ... @@ from django.conf import settings +from django.db.models.loading import get_app from django.core.urlresolvers import reverse @@ ... @@ from django.contrib.sites.models import Site +from django.core.exceptions import ImproperlyConfigured @@ ... @@ try: + mailer = get_app("mailer") from mailer import send_mail -except ImportError: +except ImproperlyConfigured: from django.core.mail import send_mail
--- a/notification/backends/email.py +++ b/notification/backends/email.py @@ -2,2 +2,3 @@ CON from django.conf import settings ADD from django.db.models.loading import get_app CON from django.core.urlresolvers import reverse @@ -6,2 +7,3 @@ CON from django.contrib.sites.models import Site ADD from django.core.exceptions import ImproperlyConfigured CON @@ -12,4 +14,5 @@ CON try: ADD mailer = get_app("mailer") CON from mailer import send_mail DEL except ImportError: ADD except ImproperlyConfigured: CON from django.core.mail import send_mail
<<<<<<< SEARCH from django.conf import settings from django.core.urlresolvers import reverse from django.template.loader import render_to_string from django.utils.translation import ugettext from django.contrib.sites.models import Site from notification import backends ======= from django.conf import settings from django.db.models.loading import get_app from django.core.urlresolvers import reverse from django.template.loader import render_to_string from django.utils.translation import ugettext from django.contrib.sites.models import Site from django.core.exceptions import ImproperlyConfigured from notification import backends >>>>>>> REPLACE <<<<<<< SEARCH # favour django-mailer but fall back to django.core.mail try: from mailer import send_mail except ImportError: from django.core.mail import send_mail ======= # favour django-mailer but fall back to django.core.mail try: mailer = get_app("mailer") from mailer import send_mail except ImproperlyConfigured: from django.core.mail import send_mail >>>>>>> REPLACE
google/site-kit-wp
d5033d012240ba9e3617a73b6efcefd0f42259d1
assets/js/components/root/index.js
javascript
apache-2.0
Update Root with CollectModuleData for legacy compat.
/**
 * Root component.
 *
 * Site Kit by Google, Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * External dependencies
 */
import PropTypes from 'prop-types';

/**
 * Internal dependencies
 */
import Data from 'googlesitekit-data';
import ErrorHandler from '../ErrorHandler';
import PermissionsModal from '../permissions-modal';

export default function Root( { children, registry } ) {
	return (
		<Data.RegistryProvider value={ registry }>
			<ErrorHandler>
				{ children }
				<PermissionsModal />
			</ErrorHandler>
		</Data.RegistryProvider>
	);
}

Root.propTypes = {
	children: PropTypes.node.isRequired,
	registry: PropTypes.object,
};

Root.defaultProps = {
	registry: Data,
};
/**
 * Root component.
 *
 * Site Kit by Google, Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * External dependencies
 */
import PropTypes from 'prop-types';

/**
 * Internal dependencies
 */
import Data from 'googlesitekit-data';
import ErrorHandler from '../ErrorHandler';
import PermissionsModal from '../permissions-modal';
import RestoreSnapshots from '../restore-snapshots';
import CollectModuleData from '../data/collect-module-data';

export default function Root( {
	children,
	registry,
	dataAPIContext,
	dataAPIModuleArgs,
} ) {
	return (
		<Data.RegistryProvider value={ registry }>
			<ErrorHandler>
				<RestoreSnapshots>
					{ children }
					{ dataAPIContext && (
						// Legacy dataAPI support.
						<CollectModuleData context={ dataAPIContext } args={ dataAPIModuleArgs } />
					) }
				</RestoreSnapshots>
				<PermissionsModal />
			</ErrorHandler>
		</Data.RegistryProvider>
	);
}

Root.propTypes = {
	children: PropTypes.node.isRequired,
	registry: PropTypes.object,
};

Root.defaultProps = {
	registry: Data,
};
15
2
2
mixed
--- a/assets/js/components/root/index.js +++ b/assets/js/components/root/index.js @@ -29,4 +29,11 @@ import PermissionsModal from '../permissions-modal'; +import RestoreSnapshots from '../restore-snapshots'; +import CollectModuleData from '../data/collect-module-data'; -export default function Root( { children, registry } ) { +export default function Root( { + children, + registry, + dataAPIContext, + dataAPIModuleArgs, +} ) { return ( @@ -34,3 +41,9 @@ <ErrorHandler> - { children } + <RestoreSnapshots> + { children } + { dataAPIContext && ( + // Legacy dataAPI support. + <CollectModuleData context={ dataAPIContext } args={ dataAPIModuleArgs } /> + ) } + </RestoreSnapshots> <PermissionsModal />
--- a/assets/js/components/root/index.js +++ b/assets/js/components/root/index.js @@ ... @@ import PermissionsModal from '../permissions-modal'; +import RestoreSnapshots from '../restore-snapshots'; +import CollectModuleData from '../data/collect-module-data'; -export default function Root( { children, registry } ) { +export default function Root( { + children, + registry, + dataAPIContext, + dataAPIModuleArgs, +} ) { return ( @@ ... @@ <ErrorHandler> - { children } + <RestoreSnapshots> + { children } + { dataAPIContext && ( + // Legacy dataAPI support. + <CollectModuleData context={ dataAPIContext } args={ dataAPIModuleArgs } /> + ) } + </RestoreSnapshots> <PermissionsModal />
--- a/assets/js/components/root/index.js +++ b/assets/js/components/root/index.js @@ -29,4 +29,11 @@ CON import PermissionsModal from '../permissions-modal'; ADD import RestoreSnapshots from '../restore-snapshots'; ADD import CollectModuleData from '../data/collect-module-data'; CON DEL export default function Root( { children, registry } ) { ADD export default function Root( { ADD children, ADD registry, ADD dataAPIContext, ADD dataAPIModuleArgs, ADD } ) { CON return ( @@ -34,3 +41,9 @@ CON <ErrorHandler> DEL { children } ADD <RestoreSnapshots> ADD { children } ADD { dataAPIContext && ( ADD // Legacy dataAPI support. ADD <CollectModuleData context={ dataAPIContext } args={ dataAPIModuleArgs } /> ADD ) } ADD </RestoreSnapshots> CON <PermissionsModal />
<<<<<<< SEARCH import ErrorHandler from '../ErrorHandler'; import PermissionsModal from '../permissions-modal'; export default function Root( { children, registry } ) { return ( <Data.RegistryProvider value={ registry }> <ErrorHandler> { children } <PermissionsModal /> </ErrorHandler> ======= import ErrorHandler from '../ErrorHandler'; import PermissionsModal from '../permissions-modal'; import RestoreSnapshots from '../restore-snapshots'; import CollectModuleData from '../data/collect-module-data'; export default function Root( { children, registry, dataAPIContext, dataAPIModuleArgs, } ) { return ( <Data.RegistryProvider value={ registry }> <ErrorHandler> <RestoreSnapshots> { children } { dataAPIContext && ( // Legacy dataAPI support. <CollectModuleData context={ dataAPIContext } args={ dataAPIModuleArgs } /> ) } </RestoreSnapshots> <PermissionsModal /> </ErrorHandler> >>>>>>> REPLACE
dfarr/fathertime
23991aaef3ec1d9be8de478d799acb100b9501d1
server.js
javascript
mit
Create basic time regex and stub out flow
var config = require('./config.js');
var Slack = require('slack-client');
var token = config.slackApiKey;
var slack = new Slack(token, true, true);

slack.on('message', function(message) {
  var user = slack.getUserByID(message.user);
  var channel = slack.getChannelGroupOrDMByID(message.channel);

  if (message.type === 'message'
  && channel.name === 'bottesting') {
    console.log(channel.name + ' ' + user.name + ':' + message.text);
    channel.send('Hey there');
  }
});

slack.login();
var config = require('./config.js');
var Slack = require('slack-client');
var token = config.slackApiKey;
var slack = new Slack(token, true, true);

slack.on('message', function(message) {
  var user = slack.getUserByID(message.user);
  var channel = slack.getChannelGroupOrDMByID(message.channel);

  // if (message.type === 'message'
  // && channel.name === 'bottesting') {
  // console.log(channel.name + ' ' + user.name + ':' + message.text);
  // channel.send('Hey there');
  // }

  var regex = /([0-9]{1,2}):([0-9]{2})(am|pm)?/gi;

  var match = regex.exec(message);

  console.log(match);

  if(match && message.type === 'message' && channel.name === 'bottesting') {

    var time = new Date();

    var hour = parseInt(match[1], 10);
    var mins = parseInt(match[2], 10);

    if(match[3] && match[3].toLowerCase() === 'pm') {
      hour = hour + 12;
    }

    time.setHours(hour);
    time.setMinutes(mins);

    channel.send(time);

    // User.findOne({uuid: message.user}, function(err, user) {

    // if(err) {
    // return;
    // }

    // if(!user.timezone) {
    // // ask user for timezone
    // return;
    // }

    // channel.users.forEach(function (user) {

    // User.findOne({uuid: user.id}, function(err, user) {

    // if(!err && user.timezone) {
    // channel.sendDM(time, user.id);
    // }

    // });

    // });

    // });

  }
});

slack.login();
54
5
1
mixed
--- a/server.js +++ b/server.js @@ -7,8 +7,57 @@ var user = slack.getUserByID(message.user); - var channel = slack.getChannelGroupOrDMByID(message.channel); + var channel = slack.getChannelGroupOrDMByID(message.channel); - if (message.type === 'message' - && channel.name === 'bottesting') { - console.log(channel.name + ' ' + user.name + ':' + message.text); - channel.send('Hey there'); + // if (message.type === 'message' + // && channel.name === 'bottesting') { + // console.log(channel.name + ' ' + user.name + ':' + message.text); + // channel.send('Hey there'); + // } + + var regex = /([0-9]{1,2}):([0-9]{2})(am|pm)?/gi; + + var match = regex.exec(message); + + console.log(match); + + if(match && message.type === 'message' && channel.name === 'bottesting') { + + var time = new Date(); + + var hour = parseInt(match[1], 10); + var mins = parseInt(match[2], 10); + + if(match[3] && match[3].toLowerCase() === 'pm') { + hour = hour + 12; + } + + time.setHours(hour); + time.setMinutes(mins); + + channel.send(time); + + // User.findOne({uuid: message.user}, function(err, user) { + + // if(err) { + // return; + // } + + // if(!user.timezone) { + // // ask user for timezone + // return; + // } + + // channel.users.forEach(function (user) { + + // User.findOne({uuid: user.id}, function(err, user) { + + // if(!err && user.timezone) { + // channel.sendDM(time, user.id); + // } + + // }); + + // }); + + // }); + }
--- a/server.js +++ b/server.js @@ ... @@ var user = slack.getUserByID(message.user); - var channel = slack.getChannelGroupOrDMByID(message.channel); + var channel = slack.getChannelGroupOrDMByID(message.channel); - if (message.type === 'message' - && channel.name === 'bottesting') { - console.log(channel.name + ' ' + user.name + ':' + message.text); - channel.send('Hey there'); + // if (message.type === 'message' + // && channel.name === 'bottesting') { + // console.log(channel.name + ' ' + user.name + ':' + message.text); + // channel.send('Hey there'); + // } + + var regex = /([0-9]{1,2}):([0-9]{2})(am|pm)?/gi; + + var match = regex.exec(message); + + console.log(match); + + if(match && message.type === 'message' && channel.name === 'bottesting') { + + var time = new Date(); + + var hour = parseInt(match[1], 10); + var mins = parseInt(match[2], 10); + + if(match[3] && match[3].toLowerCase() === 'pm') { + hour = hour + 12; + } + + time.setHours(hour); + time.setMinutes(mins); + + channel.send(time); + + // User.findOne({uuid: message.user}, function(err, user) { + + // if(err) { + // return; + // } + + // if(!user.timezone) { + // // ask user for timezone + // return; + // } + + // channel.users.forEach(function (user) { + + // User.findOne({uuid: user.id}, function(err, user) { + + // if(!err && user.timezone) { + // channel.sendDM(time, user.id); + // } + + // }); + + // }); + + // }); + }
--- a/server.js +++ b/server.js @@ -7,8 +7,57 @@ CON var user = slack.getUserByID(message.user); DEL var channel = slack.getChannelGroupOrDMByID(message.channel); ADD var channel = slack.getChannelGroupOrDMByID(message.channel); CON DEL if (message.type === 'message' DEL && channel.name === 'bottesting') { DEL console.log(channel.name + ' ' + user.name + ':' + message.text); DEL channel.send('Hey there'); ADD // if (message.type === 'message' ADD // && channel.name === 'bottesting') { ADD // console.log(channel.name + ' ' + user.name + ':' + message.text); ADD // channel.send('Hey there'); ADD // } ADD ADD var regex = /([0-9]{1,2}):([0-9]{2})(am|pm)?/gi; ADD ADD var match = regex.exec(message); ADD ADD console.log(match); ADD ADD if(match && message.type === 'message' && channel.name === 'bottesting') { ADD ADD var time = new Date(); ADD ADD var hour = parseInt(match[1], 10); ADD var mins = parseInt(match[2], 10); ADD ADD if(match[3] && match[3].toLowerCase() === 'pm') { ADD hour = hour + 12; ADD } ADD ADD time.setHours(hour); ADD time.setMinutes(mins); ADD ADD channel.send(time); ADD ADD // User.findOne({uuid: message.user}, function(err, user) { ADD ADD // if(err) { ADD // return; ADD // } ADD ADD // if(!user.timezone) { ADD // // ask user for timezone ADD // return; ADD // } ADD ADD // channel.users.forEach(function (user) { ADD ADD // User.findOne({uuid: user.id}, function(err, user) { ADD ADD // if(!err && user.timezone) { ADD // channel.sendDM(time, user.id); ADD // } ADD ADD // }); ADD ADD // }); ADD ADD // }); ADD CON }
<<<<<<< SEARCH slack.on('message', function(message) { var user = slack.getUserByID(message.user); var channel = slack.getChannelGroupOrDMByID(message.channel); if (message.type === 'message' && channel.name === 'bottesting') { console.log(channel.name + ' ' + user.name + ':' + message.text); channel.send('Hey there'); } }); ======= slack.on('message', function(message) { var user = slack.getUserByID(message.user); var channel = slack.getChannelGroupOrDMByID(message.channel); // if (message.type === 'message' // && channel.name === 'bottesting') { // console.log(channel.name + ' ' + user.name + ':' + message.text); // channel.send('Hey there'); // } var regex = /([0-9]{1,2}):([0-9]{2})(am|pm)?/gi; var match = regex.exec(message); console.log(match); if(match && message.type === 'message' && channel.name === 'bottesting') { var time = new Date(); var hour = parseInt(match[1], 10); var mins = parseInt(match[2], 10); if(match[3] && match[3].toLowerCase() === 'pm') { hour = hour + 12; } time.setHours(hour); time.setMinutes(mins); channel.send(time); // User.findOne({uuid: message.user}, function(err, user) { // if(err) { // return; // } // if(!user.timezone) { // // ask user for timezone // return; // } // channel.users.forEach(function (user) { // User.findOne({uuid: user.id}, function(err, user) { // if(!err && user.timezone) { // channel.sendDM(time, user.id); // } // }); // }); // }); } }); >>>>>>> REPLACE
zhangxin840/web-snapshot
3ac79f63c258c2d49943b0c4ad8c2a1ef0f3315f
snapshot.js
javascript
mit
Use fs to output file.
var utils = require('./tools/utils')
var shellRunner = require('./tools/shellRunner').setCommand('wkhtmltoimage');
var imagesPath = 'public/snapshots/';

var take = function (url) {
  var options = {
    output: imagesPath + utils.getMd5(url) + '.jpg',
    quality: 100
  };

  console.log('run', url, options);

  var wkhtmltoimage = shellRunner.run(url, options);

  wkhtmltoimage.stdout.on('data', (data) => {
    console.log(`stdout: ${data}`);
  });

  wkhtmltoimage.stderr.on('data', (data) => {
    console.log(`stderr: ${data}`);
  });

  wkhtmltoimage.on('close', (code) => {
    console.log(`child process exited with code ${code}`);
  });

  return child;
};

module.exports = {
  take: take
};
var fs = require('fs');
var utils = require('./tools/utils');
var shellRunner = require('./tools/shellRunner').setCommand('wkhtmltoimage');

var getFilePath = function (url, options) {
  var imagesPath = 'public/snapshots/';
  var format = '.jpg';
  return imagesPath + utils.getMd5(url) + format;
};

var take = function (url, options) {
  var options = options || {
    quality: 100
  };
  var filePath = getFilePath(url, options);
  var childProcess;

  console.log('Start snapshot', url, options, filePath);

  childProcess = shellRunner.run(url, options);
  childProcess.stdout.pipe(fs.createWriteStream(filePath));

  // childProcess.stdout.on('data', (data) => {
  // console.log(`stdout: ${data}`);
  // });

  childProcess.stderr.on('data', (data) => {
    console.log(`stderr: ${data}`);
  });

  childProcess.on('close', (code) => {
    console.log(`child process exited with code ${code}`);
  });

  return childProcess;
};

module.exports = {
  take: take
};
24
16
1
mixed
--- a/snapshot.js +++ b/snapshot.js @@ -1,28 +1,36 @@ -var utils = require('./tools/utils') +var fs = require('fs'); +var utils = require('./tools/utils'); var shellRunner = require('./tools/shellRunner').setCommand('wkhtmltoimage'); -var imagesPath = 'public/snapshots/'; -var take = function (url) { - var options = { - output: imagesPath + utils.getMd5(url) + '.jpg', +var getFilePath = function (url, options) { + var imagesPath = 'public/snapshots/'; + var format = '.jpg'; + return imagesPath + utils.getMd5(url) + format; +}; + +var take = function (url, options) { + var options = options || { quality: 100 }; + var filePath = getFilePath(url, options); + var childProcess; - console.log('run', url, options); + console.log('Start snapshot', url, options, filePath); - var wkhtmltoimage = shellRunner.run(url, options); + childProcess = shellRunner.run(url, options); + childProcess.stdout.pipe(fs.createWriteStream(filePath)); - wkhtmltoimage.stdout.on('data', (data) => { - console.log(`stdout: ${data}`); + // childProcess.stdout.on('data', (data) => { + // console.log(`stdout: ${data}`); + // }); + + childProcess.stderr.on('data', (data) => { + console.log(`stderr: ${data}`); }); - wkhtmltoimage.stderr.on('data', (data) => { - console.log(`stderr: ${data}`); + childProcess.on('close', (code) => { + console.log(`child process exited with code ${code}`); }); - wkhtmltoimage.on('close', (code) => { - console.log(`child process exited with code ${code}`); - }); - - return child; + return childProcess; };
--- a/snapshot.js +++ b/snapshot.js @@ ... @@ -var utils = require('./tools/utils') +var fs = require('fs'); +var utils = require('./tools/utils'); var shellRunner = require('./tools/shellRunner').setCommand('wkhtmltoimage'); -var imagesPath = 'public/snapshots/'; -var take = function (url) { - var options = { - output: imagesPath + utils.getMd5(url) + '.jpg', +var getFilePath = function (url, options) { + var imagesPath = 'public/snapshots/'; + var format = '.jpg'; + return imagesPath + utils.getMd5(url) + format; +}; + +var take = function (url, options) { + var options = options || { quality: 100 }; + var filePath = getFilePath(url, options); + var childProcess; - console.log('run', url, options); + console.log('Start snapshot', url, options, filePath); - var wkhtmltoimage = shellRunner.run(url, options); + childProcess = shellRunner.run(url, options); + childProcess.stdout.pipe(fs.createWriteStream(filePath)); - wkhtmltoimage.stdout.on('data', (data) => { - console.log(`stdout: ${data}`); + // childProcess.stdout.on('data', (data) => { + // console.log(`stdout: ${data}`); + // }); + + childProcess.stderr.on('data', (data) => { + console.log(`stderr: ${data}`); }); - wkhtmltoimage.stderr.on('data', (data) => { - console.log(`stderr: ${data}`); + childProcess.on('close', (code) => { + console.log(`child process exited with code ${code}`); }); - wkhtmltoimage.on('close', (code) => { - console.log(`child process exited with code ${code}`); - }); - - return child; + return childProcess; };
--- a/snapshot.js +++ b/snapshot.js @@ -1,28 +1,36 @@ DEL var utils = require('./tools/utils') ADD var fs = require('fs'); ADD var utils = require('./tools/utils'); CON var shellRunner = require('./tools/shellRunner').setCommand('wkhtmltoimage'); DEL var imagesPath = 'public/snapshots/'; CON DEL var take = function (url) { DEL var options = { DEL output: imagesPath + utils.getMd5(url) + '.jpg', ADD var getFilePath = function (url, options) { ADD var imagesPath = 'public/snapshots/'; ADD var format = '.jpg'; ADD return imagesPath + utils.getMd5(url) + format; ADD }; ADD ADD var take = function (url, options) { ADD var options = options || { CON quality: 100 CON }; ADD var filePath = getFilePath(url, options); ADD var childProcess; CON DEL console.log('run', url, options); ADD console.log('Start snapshot', url, options, filePath); CON DEL var wkhtmltoimage = shellRunner.run(url, options); ADD childProcess = shellRunner.run(url, options); ADD childProcess.stdout.pipe(fs.createWriteStream(filePath)); CON DEL wkhtmltoimage.stdout.on('data', (data) => { DEL console.log(`stdout: ${data}`); ADD // childProcess.stdout.on('data', (data) => { ADD // console.log(`stdout: ${data}`); ADD // }); ADD ADD childProcess.stderr.on('data', (data) => { ADD console.log(`stderr: ${data}`); CON }); CON DEL wkhtmltoimage.stderr.on('data', (data) => { DEL console.log(`stderr: ${data}`); ADD childProcess.on('close', (code) => { ADD console.log(`child process exited with code ${code}`); CON }); CON DEL wkhtmltoimage.on('close', (code) => { DEL console.log(`child process exited with code ${code}`); DEL }); DEL DEL return child; ADD return childProcess; CON };
<<<<<<< SEARCH var utils = require('./tools/utils') var shellRunner = require('./tools/shellRunner').setCommand('wkhtmltoimage'); var imagesPath = 'public/snapshots/'; var take = function (url) { var options = { output: imagesPath + utils.getMd5(url) + '.jpg', quality: 100 }; console.log('run', url, options); var wkhtmltoimage = shellRunner.run(url, options); wkhtmltoimage.stdout.on('data', (data) => { console.log(`stdout: ${data}`); }); wkhtmltoimage.stderr.on('data', (data) => { console.log(`stderr: ${data}`); }); wkhtmltoimage.on('close', (code) => { console.log(`child process exited with code ${code}`); }); return child; }; ======= var fs = require('fs'); var utils = require('./tools/utils'); var shellRunner = require('./tools/shellRunner').setCommand('wkhtmltoimage'); var getFilePath = function (url, options) { var imagesPath = 'public/snapshots/'; var format = '.jpg'; return imagesPath + utils.getMd5(url) + format; }; var take = function (url, options) { var options = options || { quality: 100 }; var filePath = getFilePath(url, options); var childProcess; console.log('Start snapshot', url, options, filePath); childProcess = shellRunner.run(url, options); childProcess.stdout.pipe(fs.createWriteStream(filePath)); // childProcess.stdout.on('data', (data) => { // console.log(`stdout: ${data}`); // }); childProcess.stderr.on('data', (data) => { console.log(`stderr: ${data}`); }); childProcess.on('close', (code) => { console.log(`child process exited with code ${code}`); }); return childProcess; }; >>>>>>> REPLACE
exekias/django-achilles
78cca16df6a5cdd90ec92e64455215c4b7292fae
report_coverage.py
python
apache-2.0
Sort line coverage info when reporting
#!/usr/bin/env python
# coding: utf-8

import json
import os
import sys

from coveralls import Coveralls, cli


# Patch coveralls to get javascript coverage from mocha
orig_get_coverage = Coveralls.get_coverage


def get_coverage_with_js(self):
    report = orig_get_coverage(self)

    js_files = json.load(open('.coverage-js'))['files']
    js_report = []
    for f in js_files:
        source = '\n'.join(open(f['filename']).readlines())
        name = os.path.relpath(f['filename'])
        coverage = []
        for v in f['source'].values():
            coverage.append(v['coverage'] if v['coverage'] != '' else None)

        js_report.append({
            'source': source,
            'name': name,
            'coverage': coverage}
        )

    report += js_report
    return report

Coveralls.get_coverage = get_coverage_with_js

cli.main(sys.argv[1:])
#!/usr/bin/env python
# coding: utf-8

import json
import os
import sys

from coveralls import Coveralls, cli


# Patch coveralls to get javascript coverage from mocha
orig_get_coverage = Coveralls.get_coverage


def get_coverage_with_js(self):
    report = orig_get_coverage(self)

    js_files = json.load(open('.coverage-js'))['files']
    js_report = []
    for f in js_files:
        source = '\n'.join(open(f['filename']).readlines())
        name = os.path.relpath(f['filename'])
        coverage = []

        # Create sorted coverage array from original dict
        for k, v in sorted(f['source'].items(), key=lambda x:int(x[0])):
            coverage.append(v['coverage'] if v['coverage'] != '' else None)

        js_report.append({
            'source': source,
            'name': name,
            'coverage': coverage}
        )

    report += js_report
    return report

Coveralls.get_coverage = get_coverage_with_js

cli.main(sys.argv[1:])
3
1
1
mixed
--- a/report_coverage.py +++ b/report_coverage.py @@ -23,3 +23,5 @@ coverage = [] - for v in f['source'].values(): + + # Create sorted coverage array from original dict + for k, v in sorted(f['source'].items(), key=lambda x:int(x[0])): coverage.append(v['coverage'] if v['coverage'] != '' else None)
--- a/report_coverage.py +++ b/report_coverage.py @@ ... @@ coverage = [] - for v in f['source'].values(): + + # Create sorted coverage array from original dict + for k, v in sorted(f['source'].items(), key=lambda x:int(x[0])): coverage.append(v['coverage'] if v['coverage'] != '' else None)
--- a/report_coverage.py +++ b/report_coverage.py @@ -23,3 +23,5 @@ CON coverage = [] DEL for v in f['source'].values(): ADD ADD # Create sorted coverage array from original dict ADD for k, v in sorted(f['source'].items(), key=lambda x:int(x[0])): CON coverage.append(v['coverage'] if v['coverage'] != '' else None)
<<<<<<< SEARCH name = os.path.relpath(f['filename']) coverage = [] for v in f['source'].values(): coverage.append(v['coverage'] if v['coverage'] != '' else None) ======= name = os.path.relpath(f['filename']) coverage = [] # Create sorted coverage array from original dict for k, v in sorted(f['source'].items(), key=lambda x:int(x[0])): coverage.append(v['coverage'] if v['coverage'] != '' else None) >>>>>>> REPLACE
gradle/gradle
05d53b8bc13fe7e811cd3d1e2d7744656f937318
subprojects/kotlin-dsl-tooling-builders/src/main/kotlin/org/gradle/kotlin/dsl/tooling/builders/KotlinScriptingModelBuildersRegistrationAction.kt
kotlin
apache-2.0
Revert "Set `prepareKotlinBuildScriptModel` task group explicitly" This reverts commit 853397e86cb8f888e34d290c953cdbab12b00a1f.
/*
 * Copyright 2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.gradle.kotlin.dsl.tooling.builders

import org.gradle.api.internal.project.ProjectInternal
import org.gradle.configuration.project.ProjectConfigureAction

import org.gradle.kotlin.dsl.resolver.kotlinBuildScriptModelTask
import org.gradle.kotlin.dsl.support.serviceOf

import org.gradle.tooling.provider.model.ToolingModelBuilderRegistry


class KotlinScriptingModelBuildersRegistrationAction : ProjectConfigureAction {

    override fun execute(project: ProjectInternal) {
        project.serviceOf<ToolingModelBuilderRegistry>().apply {
            register(KotlinBuildScriptModelBuilder)
            register(KotlinBuildScriptTemplateModelBuilder)
        }
        project.tasks.apply {
            register(kotlinBuildScriptModelTask) {
                it.group = "other"
            }
        }
    }
}
/*
 * Copyright 2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.gradle.kotlin.dsl.tooling.builders

import org.gradle.api.internal.project.ProjectInternal
import org.gradle.configuration.project.ProjectConfigureAction

import org.gradle.kotlin.dsl.resolver.kotlinBuildScriptModelTask
import org.gradle.kotlin.dsl.support.serviceOf

import org.gradle.tooling.provider.model.ToolingModelBuilderRegistry


class KotlinScriptingModelBuildersRegistrationAction : ProjectConfigureAction {

    override fun execute(project: ProjectInternal) {
        project.serviceOf<ToolingModelBuilderRegistry>().apply {
            register(KotlinBuildScriptModelBuilder)
            register(KotlinBuildScriptTemplateModelBuilder)
        }
        project.tasks.apply {
            register(kotlinBuildScriptModelTask)
        }
    }
}
1
3
1
mixed
--- a/subprojects/kotlin-dsl-tooling-builders/src/main/kotlin/org/gradle/kotlin/dsl/tooling/builders/KotlinScriptingModelBuildersRegistrationAction.kt +++ b/subprojects/kotlin-dsl-tooling-builders/src/main/kotlin/org/gradle/kotlin/dsl/tooling/builders/KotlinScriptingModelBuildersRegistrationAction.kt @@ -35,5 +35,3 @@ project.tasks.apply { - register(kotlinBuildScriptModelTask) { - it.group = "other" - } + register(kotlinBuildScriptModelTask) }
--- a/subprojects/kotlin-dsl-tooling-builders/src/main/kotlin/org/gradle/kotlin/dsl/tooling/builders/KotlinScriptingModelBuildersRegistrationAction.kt +++ b/subprojects/kotlin-dsl-tooling-builders/src/main/kotlin/org/gradle/kotlin/dsl/tooling/builders/KotlinScriptingModelBuildersRegistrationAction.kt @@ ... @@ project.tasks.apply { - register(kotlinBuildScriptModelTask) { - it.group = "other" - } + register(kotlinBuildScriptModelTask) }
--- a/subprojects/kotlin-dsl-tooling-builders/src/main/kotlin/org/gradle/kotlin/dsl/tooling/builders/KotlinScriptingModelBuildersRegistrationAction.kt +++ b/subprojects/kotlin-dsl-tooling-builders/src/main/kotlin/org/gradle/kotlin/dsl/tooling/builders/KotlinScriptingModelBuildersRegistrationAction.kt @@ -35,5 +35,3 @@ CON project.tasks.apply { DEL register(kotlinBuildScriptModelTask) { DEL it.group = "other" DEL } ADD register(kotlinBuildScriptModelTask) CON }
<<<<<<< SEARCH } project.tasks.apply { register(kotlinBuildScriptModelTask) { it.group = "other" } } } ======= } project.tasks.apply { register(kotlinBuildScriptModelTask) } } >>>>>>> REPLACE
dorayakikun/alfred_jira_workflow
305e666479d3cf01dd016ed57a9ba84cc2fdf3de
src/main.rs
rust
mit
:art: Make possible to take argument in subcommand
extern crate clap;
extern crate hyper;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate toml;

mod config;
mod issue;
mod jira_api;
mod jira_client;
mod jira_request;
mod search_command;
mod search_response;
mod workflow;

use clap::{App, SubCommand};

fn main() {
    let workflow = workflow::new();
    let matches = App::new("jira")
        .version(env!("CARGO_PKG_VERSION"))
        .about("Alfred JIRA Workflow.")
        .subcommand(SubCommand::with_name("Search")
            .about("Search JIRA issues"))
        .get_matches();

    if let Some(_) = matches.subcommand_matches("search") {
        match workflow.seach().run(&"".to_string()) {
            Ok(_) => (),
            Err(e) => {
                println!("{}", e.to_string());
                std::process::exit(1);
            }
        }
        std::process::exit(0);
    };

    std::process::exit(0);
}
extern crate clap;
extern crate hyper;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate toml;

mod config;
mod issue;
mod jira_api;
mod jira_client;
mod jira_request;
mod search_command;
mod search_response;
mod workflow;

use clap::{Arg, App, SubCommand};

fn main() {
    let workflow = workflow::new();
    let matches = App::new("jira")
        .version(env!("CARGO_PKG_VERSION"))
        .about("Alfred JIRA Workflow.")
        .subcommand(SubCommand::with_name("search")
            .about("Search JIRA issues")
            .arg(Arg::with_name("keyword")
                .required(true)))
        .get_matches();

    if let Some(matches) = matches.subcommand_matches("search") {
        if let Some(keyword) = matches.value_of("keyword") {
            match workflow.search().run(&keyword.to_string()) {
                Ok(_) => (),
                Err(e) => {
                    println!("{}", e.to_string());
                    std::process::exit(1);
                }
            }
        }
        std::process::exit(0);
    };

    std::process::exit(0);
}
13
9
2
mixed
--- a/src/main.rs +++ b/src/main.rs @@ -16,3 +16,3 @@ -use clap::{App, SubCommand}; +use clap::{Arg, App, SubCommand}; @@ -23,12 +23,16 @@ .about("Alfred JIRA Workflow.") - .subcommand(SubCommand::with_name("Search") - .about("Search JIRA issues")) + .subcommand(SubCommand::with_name("search") + .about("Search JIRA issues") + .arg(Arg::with_name("keyword") + .required(true))) .get_matches(); - if let Some(_) = matches.subcommand_matches("search") { - match workflow.seach().run(&"".to_string()) { - Ok(_) => (), - Err(e) => { - println!("{}", e.to_string()); - std::process::exit(1); + if let Some(matches) = matches.subcommand_matches("search") { + if let Some(keyword) = matches.value_of("keyword") { + match workflow.search().run(&keyword.to_string()) { + Ok(_) => (), + Err(e) => { + println!("{}", e.to_string()); + std::process::exit(1); + } }
--- a/src/main.rs +++ b/src/main.rs @@ ... @@ -use clap::{App, SubCommand}; +use clap::{Arg, App, SubCommand}; @@ ... @@ .about("Alfred JIRA Workflow.") - .subcommand(SubCommand::with_name("Search") - .about("Search JIRA issues")) + .subcommand(SubCommand::with_name("search") + .about("Search JIRA issues") + .arg(Arg::with_name("keyword") + .required(true))) .get_matches(); - if let Some(_) = matches.subcommand_matches("search") { - match workflow.seach().run(&"".to_string()) { - Ok(_) => (), - Err(e) => { - println!("{}", e.to_string()); - std::process::exit(1); + if let Some(matches) = matches.subcommand_matches("search") { + if let Some(keyword) = matches.value_of("keyword") { + match workflow.search().run(&keyword.to_string()) { + Ok(_) => (), + Err(e) => { + println!("{}", e.to_string()); + std::process::exit(1); + } }
--- a/src/main.rs +++ b/src/main.rs @@ -16,3 +16,3 @@ CON DEL use clap::{App, SubCommand}; ADD use clap::{Arg, App, SubCommand}; CON @@ -23,12 +23,16 @@ CON .about("Alfred JIRA Workflow.") DEL .subcommand(SubCommand::with_name("Search") DEL .about("Search JIRA issues")) ADD .subcommand(SubCommand::with_name("search") ADD .about("Search JIRA issues") ADD .arg(Arg::with_name("keyword") ADD .required(true))) CON .get_matches(); CON DEL if let Some(_) = matches.subcommand_matches("search") { DEL match workflow.seach().run(&"".to_string()) { DEL Ok(_) => (), DEL Err(e) => { DEL println!("{}", e.to_string()); DEL std::process::exit(1); ADD if let Some(matches) = matches.subcommand_matches("search") { ADD if let Some(keyword) = matches.value_of("keyword") { ADD match workflow.search().run(&keyword.to_string()) { ADD Ok(_) => (), ADD Err(e) => { ADD println!("{}", e.to_string()); ADD std::process::exit(1); ADD } CON }
<<<<<<< SEARCH mod workflow; use clap::{App, SubCommand}; fn main() { ======= mod workflow; use clap::{Arg, App, SubCommand}; fn main() { >>>>>>> REPLACE <<<<<<< SEARCH .version(env!("CARGO_PKG_VERSION")) .about("Alfred JIRA Workflow.") .subcommand(SubCommand::with_name("Search") .about("Search JIRA issues")) .get_matches(); if let Some(_) = matches.subcommand_matches("search") { match workflow.seach().run(&"".to_string()) { Ok(_) => (), Err(e) => { println!("{}", e.to_string()); std::process::exit(1); } } ======= .version(env!("CARGO_PKG_VERSION")) .about("Alfred JIRA Workflow.") .subcommand(SubCommand::with_name("search") .about("Search JIRA issues") .arg(Arg::with_name("keyword") .required(true))) .get_matches(); if let Some(matches) = matches.subcommand_matches("search") { if let Some(keyword) = matches.value_of("keyword") { match workflow.search().run(&keyword.to_string()) { Ok(_) => (), Err(e) => { println!("{}", e.to_string()); std::process::exit(1); } } } >>>>>>> REPLACE
FarbodSalamat-Zadeh/TimetableApp
705918710413fb0a1a292f0ab2565a28e0ac61d7
app/src/main/java/com/satsumasoftware/timetable/PrefUtils.kt
kotlin
apache-2.0
Add functions to read/put values for pref_display_weeks_as_letters
package com.satsumasoftware.timetable

import android.content.Context
import android.preference.PreferenceManager
import com.satsumasoftware.timetable.db.util.TimetableUtils
import com.satsumasoftware.timetable.framework.Timetable

class PrefUtils {

    companion object {

        const val PREF_CURRENT_TIMETABLE = "pref_current_timetable"

        @JvmStatic fun getCurrentTimetable(context: Context): Timetable? {
            val sp = PreferenceManager.getDefaultSharedPreferences(context)
            val timetableId = sp.getInt(PREF_CURRENT_TIMETABLE, -1)
            return if (timetableId == -1) {
                null
            } else {
                TimetableUtils.getTimetableWithId(context, timetableId)
            }
        }

        @JvmStatic fun setCurrentTimetable(context: Context, timetable: Timetable) {
            val sp = PreferenceManager.getDefaultSharedPreferences(context)
            sp.edit().putInt(PREF_CURRENT_TIMETABLE, timetable.id).apply()
        }

    }
}
package com.satsumasoftware.timetable

import android.content.Context
import android.preference.PreferenceManager
import com.satsumasoftware.timetable.db.util.TimetableUtils
import com.satsumasoftware.timetable.framework.Timetable

class PrefUtils {

    companion object {

        const val PREF_CURRENT_TIMETABLE = "pref_current_timetable"

        @JvmStatic fun getCurrentTimetable(context: Context): Timetable? {
            val sp = PreferenceManager.getDefaultSharedPreferences(context)
            val timetableId = sp.getInt(PREF_CURRENT_TIMETABLE, -1)
            return if (timetableId == -1) {
                null
            } else {
                TimetableUtils.getTimetableWithId(context, timetableId)
            }
        }

        @JvmStatic fun setCurrentTimetable(context: Context, timetable: Timetable) {
            val sp = PreferenceManager.getDefaultSharedPreferences(context)
            sp.edit().putInt(PREF_CURRENT_TIMETABLE, timetable.id).apply()
        }


        const val PREF_DISPLAY_WEEKS_AS_LETTERS = "pref_display_weeks_as_letters"

        @JvmStatic fun displayWeeksAsLetters(context: Context): Boolean {
            val sp = PreferenceManager.getDefaultSharedPreferences(context)
            return sp.getBoolean(PREF_DISPLAY_WEEKS_AS_LETTERS, false)
        }

        @JvmStatic fun setDisplayWeeksAsLetters(context: Context, boolean: Boolean) {
            val sp = PreferenceManager.getDefaultSharedPreferences(context)
            sp.edit().putBoolean(PREF_DISPLAY_WEEKS_AS_LETTERS, boolean).apply()
        }

    }
}
13
0
1
add_only
--- a/app/src/main/java/com/satsumasoftware/timetable/PrefUtils.kt +++ b/app/src/main/java/com/satsumasoftware/timetable/PrefUtils.kt @@ -28,2 +28,15 @@ + + const val PREF_DISPLAY_WEEKS_AS_LETTERS = "pref_display_weeks_as_letters" + + @JvmStatic fun displayWeeksAsLetters(context: Context): Boolean { + val sp = PreferenceManager.getDefaultSharedPreferences(context) + return sp.getBoolean(PREF_DISPLAY_WEEKS_AS_LETTERS, false) + } + + @JvmStatic fun setDisplayWeeksAsLetters(context: Context, boolean: Boolean) { + val sp = PreferenceManager.getDefaultSharedPreferences(context) + sp.edit().putBoolean(PREF_DISPLAY_WEEKS_AS_LETTERS, boolean).apply() + } + }
--- a/app/src/main/java/com/satsumasoftware/timetable/PrefUtils.kt +++ b/app/src/main/java/com/satsumasoftware/timetable/PrefUtils.kt @@ ... @@ + + const val PREF_DISPLAY_WEEKS_AS_LETTERS = "pref_display_weeks_as_letters" + + @JvmStatic fun displayWeeksAsLetters(context: Context): Boolean { + val sp = PreferenceManager.getDefaultSharedPreferences(context) + return sp.getBoolean(PREF_DISPLAY_WEEKS_AS_LETTERS, false) + } + + @JvmStatic fun setDisplayWeeksAsLetters(context: Context, boolean: Boolean) { + val sp = PreferenceManager.getDefaultSharedPreferences(context) + sp.edit().putBoolean(PREF_DISPLAY_WEEKS_AS_LETTERS, boolean).apply() + } + }
--- a/app/src/main/java/com/satsumasoftware/timetable/PrefUtils.kt +++ b/app/src/main/java/com/satsumasoftware/timetable/PrefUtils.kt @@ -28,2 +28,15 @@ CON ADD ADD const val PREF_DISPLAY_WEEKS_AS_LETTERS = "pref_display_weeks_as_letters" ADD ADD @JvmStatic fun displayWeeksAsLetters(context: Context): Boolean { ADD val sp = PreferenceManager.getDefaultSharedPreferences(context) ADD return sp.getBoolean(PREF_DISPLAY_WEEKS_AS_LETTERS, false) ADD } ADD ADD @JvmStatic fun setDisplayWeeksAsLetters(context: Context, boolean: Boolean) { ADD val sp = PreferenceManager.getDefaultSharedPreferences(context) ADD sp.edit().putBoolean(PREF_DISPLAY_WEEKS_AS_LETTERS, boolean).apply() ADD } ADD CON }
<<<<<<< SEARCH } } } ======= } const val PREF_DISPLAY_WEEKS_AS_LETTERS = "pref_display_weeks_as_letters" @JvmStatic fun displayWeeksAsLetters(context: Context): Boolean { val sp = PreferenceManager.getDefaultSharedPreferences(context) return sp.getBoolean(PREF_DISPLAY_WEEKS_AS_LETTERS, false) } @JvmStatic fun setDisplayWeeksAsLetters(context: Context, boolean: Boolean) { val sp = PreferenceManager.getDefaultSharedPreferences(context) sp.edit().putBoolean(PREF_DISPLAY_WEEKS_AS_LETTERS, boolean).apply() } } } >>>>>>> REPLACE
gradle/gradle
0de91394e28be14659639e31ddc5a9e130c35fb3
.teamcity/settings.kts
kotlin
apache-2.0
Upgrade TeamCity DSL version to 2022.04
import common.VersionedSettingsBranch
import jetbrains.buildServer.configs.kotlin.v2019_2.project
import jetbrains.buildServer.configs.kotlin.v2019_2.version
import projects.GradleBuildToolRootProject

version = "2021.2"

/*

Master (buildTypeId: Gradle_Master)
 |----- Check (buildTypeId: Gradle_Master_Check)
 |        |---- QuickFeedbackLinux (buildTypeId: Gradle_Master_Check_QuickFeedbackLinux)
 |        |---- QuickFeedback
 |        |---- ...
 |        |---- ReadyForRelease
 |
 |----- Promotion (buildTypeId: Gradle_Master_Promotion)
 |        |----- Nightly Snapshot
 |        |----- ...
 |
 |----- Util
 |----- WarmupEc2Agent
 |----- AdHocPerformanceTest

Release (buildTypeId: Gradle_Release)
 |----- Check (buildTypeId: Gradle_Release_Check)
 |        |---- QuickFeedbackLinux (buildTypeId: Gradle_Release_Check_QuickFeedbackLinux)
 |        |---- QuickFeedback
 |        |---- ...
 |        |---- ReadyForRelease
 |
 |----- Promotion (buildTypeId: Gradle_Release_Promotion)
 |        |----- Nightly Snapshot
 |        |----- ...
 |
 |----- Util
 |----- WarmupEc2Agent
 |----- AdHocPerformanceTest

 */

project(GradleBuildToolRootProject(VersionedSettingsBranch.fromDslContext()))
import common.VersionedSettingsBranch
import jetbrains.buildServer.configs.kotlin.v2019_2.project
import jetbrains.buildServer.configs.kotlin.v2019_2.version
import projects.GradleBuildToolRootProject

version = "2022.04"

/*

Master (buildTypeId: Gradle_Master)
 |----- Check (buildTypeId: Gradle_Master_Check)
 |        |---- QuickFeedbackLinux (buildTypeId: Gradle_Master_Check_QuickFeedbackLinux)
 |        |---- QuickFeedback
 |        |---- ...
 |        |---- ReadyForRelease
 |
 |----- Promotion (buildTypeId: Gradle_Master_Promotion)
 |        |----- Nightly Snapshot
 |        |----- ...
 |
 |----- Util
 |----- WarmupEc2Agent
 |----- AdHocPerformanceTest

Release (buildTypeId: Gradle_Release)
 |----- Check (buildTypeId: Gradle_Release_Check)
 |        |---- QuickFeedbackLinux (buildTypeId: Gradle_Release_Check_QuickFeedbackLinux)
 |        |---- QuickFeedback
 |        |---- ...
 |        |---- ReadyForRelease
 |
 |----- Promotion (buildTypeId: Gradle_Release_Promotion)
 |        |----- Nightly Snapshot
 |        |----- ...
 |
 |----- Util
 |----- WarmupEc2Agent
 |----- AdHocPerformanceTest

 */

project(GradleBuildToolRootProject(VersionedSettingsBranch.fromDslContext()))
1
1
1
mixed
--- a/.teamcity/settings.kts +++ b/.teamcity/settings.kts @@ -5,3 +5,3 @@ -version = "2021.2" +version = "2022.04"
--- a/.teamcity/settings.kts +++ b/.teamcity/settings.kts @@ ... @@ -version = "2021.2" +version = "2022.04"
--- a/.teamcity/settings.kts +++ b/.teamcity/settings.kts @@ -5,3 +5,3 @@ CON DEL version = "2021.2" ADD version = "2022.04" CON
<<<<<<< SEARCH import projects.GradleBuildToolRootProject version = "2021.2" /* ======= import projects.GradleBuildToolRootProject version = "2022.04" /* >>>>>>> REPLACE
pachox/Rocket.Chat
49ffc1c16dc848062158c726729715a208453046
packages/rocketchat-ui/lib/fireEvent.js
javascript
mit
Add global keydown event handler

Auto focus to `textarea.input-message` when not focused on any input areas.
window.fireGlobalEvent = (eventName, params) => {
  window.dispatchEvent(new CustomEvent(eventName, {detail: params}));

  if (RocketChat.settings.get('Iframe_Integration_send_enable') === true) {
    parent.postMessage({
      eventName: eventName,
      data: params
    }, RocketChat.settings.get('Iframe_Integration_send_target_origin'));
  }
};

window.addEventListener('message', (e) => {
  if (RocketChat.settings.get('Iframe_Integration_receive_enable') !== true) {
    return;
  }

  if (typeof e.data !== 'object' || typeof e.data.externalCommand !== 'string') {
    return;
  }

  let origins = RocketChat.settings.get('Iframe_Integration_receive_origin');
  if (origins !== '*' && origins.split(',').indexOf(e.origin) === -1) {
    return console.error('Origin not allowed', e.origin);
  }

  switch (e.data.externalCommand) {
    case 'go':
      if (typeof e.data.path !== 'string' || e.data.path.trim().length === 0) {
        return console.error('`path` not defined');
      }

      FlowRouter.go(e.data.path);
      break;
  }
});
window.fireGlobalEvent = (eventName, params) => {
  window.dispatchEvent(new CustomEvent(eventName, {detail: params}));

  if (RocketChat.settings.get('Iframe_Integration_send_enable') === true) {
    parent.postMessage({
      eventName: eventName,
      data: params
    }, RocketChat.settings.get('Iframe_Integration_send_target_origin'));
  }
};

window.addEventListener('message', (e) => {
  if (RocketChat.settings.get('Iframe_Integration_receive_enable') !== true) {
    return;
  }

  if (typeof e.data !== 'object' || typeof e.data.externalCommand !== 'string') {
    return;
  }

  let origins = RocketChat.settings.get('Iframe_Integration_receive_origin');
  if (origins !== '*' && origins.split(',').indexOf(e.origin) === -1) {
    return console.error('Origin not allowed', e.origin);
  }

  switch (e.data.externalCommand) {
    case 'go':
      if (typeof e.data.path !== 'string' || e.data.path.trim().length === 0) {
        return console.error('`path` not defined');
      }

      FlowRouter.go(e.data.path);
      break;
  }
});

window.addEventListener('keydown', (e) => {
  const target = e.target;
  if (/input|textarea/i.test(target.tagName)) {
    return;
  }
  const $inputMessage = $('textarea.input-message');
  if (0 === $inputMessage.length) {
    return;
  }
  $inputMessage.focus();
});
12
0
1
add_only
--- a/packages/rocketchat-ui/lib/fireEvent.js +++ b/packages/rocketchat-ui/lib/fireEvent.js @@ -35 +35,13 @@ }); + +window.addEventListener('keydown', (e) => { + const target = e.target; + if (/input|textarea/i.test(target.tagName)) { + return; + } + const $inputMessage = $('textarea.input-message'); + if (0 === $inputMessage.length) { + return; + } + $inputMessage.focus(); +});
--- a/packages/rocketchat-ui/lib/fireEvent.js +++ b/packages/rocketchat-ui/lib/fireEvent.js @@ ... @@ }); + +window.addEventListener('keydown', (e) => { + const target = e.target; + if (/input|textarea/i.test(target.tagName)) { + return; + } + const $inputMessage = $('textarea.input-message'); + if (0 === $inputMessage.length) { + return; + } + $inputMessage.focus(); +});
--- a/packages/rocketchat-ui/lib/fireEvent.js +++ b/packages/rocketchat-ui/lib/fireEvent.js @@ -35 +35,13 @@ CON }); ADD ADD window.addEventListener('keydown', (e) => { ADD const target = e.target; ADD if (/input|textarea/i.test(target.tagName)) { ADD return; ADD } ADD const $inputMessage = $('textarea.input-message'); ADD if (0 === $inputMessage.length) { ADD return; ADD } ADD $inputMessage.focus(); ADD });
<<<<<<< SEARCH } }); ======= } }); window.addEventListener('keydown', (e) => { const target = e.target; if (/input|textarea/i.test(target.tagName)) { return; } const $inputMessage = $('textarea.input-message'); if (0 === $inputMessage.length) { return; } $inputMessage.focus(); }); >>>>>>> REPLACE
jalama/drupdates
375657de7eff50c182bd78196d22101a1a8bcb91
setup.py
python
mit
Update Development Status to stable
""" Drupdates setup script. """ try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='Drupdates', description='Drupal updates scripts', author='Jim Taylor', url='https://github.com/jalama/drupdates', download_url='https://github.com/jalama/drupdates', author_email='[email protected]', version='1.4.0', package_dir={'drupdates' : 'drupdates', 'drupdates.tests' : 'drupdates/tests'}, include_package_data=True, install_requires=['nose', 'gitpython', 'requests', 'pyyaml'], entry_points={ 'console_scripts': ['drupdates = drupdates.cli:main'], }, packages=['drupdates', 'drupdates.tests'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: MacOS :: MacOS X', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: System :: Systems Administration', 'Topic :: Software Development :: Build Tools', 'Topic :: Software Development :: Bug Tracking', ], )
""" Drupdates setup script. """ try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='Drupdates', description='Drupal updates scripts', author='Jim Taylor', url='https://github.com/jalama/drupdates', download_url='https://github.com/jalama/drupdates', author_email='[email protected]', version='1.4.0', package_dir={'drupdates' : 'drupdates', 'drupdates.tests' : 'drupdates/tests'}, include_package_data=True, install_requires=['nose', 'gitpython', 'requests', 'pyyaml'], entry_points={ 'console_scripts': ['drupdates = drupdates.cli:main'], }, packages=['drupdates', 'drupdates.tests'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: MacOS :: MacOS X', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: System :: Systems Administration', 'Topic :: Software Development :: Build Tools', 'Topic :: Software Development :: Bug Tracking', ], )
1
1
1
mixed
--- a/setup.py +++ b/setup.py @@ -22,3 +22,3 @@ classifiers=[ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Environment :: Console',
--- a/setup.py +++ b/setup.py @@ ... @@ classifiers=[ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Environment :: Console',
--- a/setup.py +++ b/setup.py @@ -22,3 +22,3 @@ CON classifiers=[ DEL 'Development Status :: 4 - Beta', ADD 'Development Status :: 5 - Production/Stable', CON 'Environment :: Console',
<<<<<<< SEARCH packages=['drupdates', 'drupdates.tests'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', ======= packages=['drupdates', 'drupdates.tests'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', >>>>>>> REPLACE
kenrick/ludo.js
cea7c7de97ddf87a1a4d592e236a5291b6b64510
src/coordinate.js
javascript
mit
Refactor the nextCoordsFrom function to use less if statements
import { List, is } from 'immutable';
import { isUndefined } from 'lodash';

export function nextCoordsFrom({
  path,
  alternate,
  switchCoord,
  fromCoord,
  next
}, list = List()) {
  if(next === 0) {
    return list;
  }

  let p = path;
  let nextCoord;

  // switch the path to the alternate path if the fromCoord is equal to
  // the switchCoord or is in the alternate path already
  if(switchCoord.equals(fromCoord) || alternate.includes(fromCoord)) {
    p = alternate;
  }

  // if there are more next(s) than alternate coords set nextCoord to undefined
  // because we have reached the end of the path
  // or
  // fromCoord will be undefined if at the end of the alternate path
  // so just continue to be undefined
  if(alternate.includes(fromCoord) && is(fromCoord, alternate.last()) || isUndefined(fromCoord)) {
    // Do not set nextCoord
  }
  // if fromCoord is at the end of the path or not on the path
  // reset to to the beginning
  else if (is(fromCoord, p.last()) || !p.includes(fromCoord)) {
    nextCoord = p.get(0);
  }
  // else progress on to the next coord on the path.
  else {
    nextCoord = p.get(p.indexOf(fromCoord) + 1);
  }

  return nextCoordsFrom({
    path: p,
    alternate,
    switchCoord,
    fromCoord: nextCoord,
    next: next - 1
  }, list.push(nextCoord));
}
import { List, is } from 'immutable';
import { isUndefined } from 'lodash';

export function nextCoordsFrom({
  path,
  alternate,
  switchCoord,
  fromCoord,
  next
}, list = List()) {
  if(next === 0) {
    return list;
  }

  let p = path;
  let nextCoord;

  // switch the path to the alternate path if the fromCoord is equal to
  // the switchCoord or is in the alternate path already
  if(switchCoord.equals(fromCoord) || alternate.includes(fromCoord)) {
    p = alternate;
  }

  // Only allow the nextCoord to be set if we are not at the end of the
  // alternate path and fromCoord is not undefined
  // otherwise nextCoord will be undefined
  if(!is(fromCoord, alternate.last()) && !isUndefined(fromCoord)) {
    // if fromCoord is at the end of the path or not on the path
    // reset to the beginning
    if (is(fromCoord, p.last()) || !p.includes(fromCoord)) {
      nextCoord = p.get(0);
    }
    // else progress on to the next coord on the path.
    else {
      nextCoord = p.get(p.indexOf(fromCoord) + 1);
    }
  }

  return nextCoordsFrom({
    path: p,
    alternate,
    switchCoord,
    fromCoord: nextCoord,
    next: next - 1
  }, list.push(nextCoord));
}
13
16
1
mixed
--- a/src/coordinate.js +++ b/src/coordinate.js @@ -23,18 +23,15 @@ - // if there are more next(s) than alternate coords set nextCoord to undefined - // because we have reached the end of the path - // or - // fromCoord will be undefined if at the end of the alternate path - // so just continue to be undefined - if(alternate.includes(fromCoord) && is(fromCoord, alternate.last()) || isUndefined(fromCoord)) { - // Do not set nextCoord - } - // if fromCoord is at the end of the path or not on the path - // reset to to the beginning - else if (is(fromCoord, p.last()) || !p.includes(fromCoord)) { - nextCoord = p.get(0); - } - // else progress on to the next coord on the path. - else { - nextCoord = p.get(p.indexOf(fromCoord) + 1); + // Only allow the nextCoord to be set if we are not at the end of the + // alternate path and fromCoord is not undefined + // otherwise nextCoord will be undefined + if(!is(fromCoord, alternate.last()) && !isUndefined(fromCoord)) { + // if fromCoord is at the end of the path or not on the path + // reset to the beginning + if (is(fromCoord, p.last()) || !p.includes(fromCoord)) { + nextCoord = p.get(0); + } + // else progress on to the next coord on the path. + else { + nextCoord = p.get(p.indexOf(fromCoord) + 1); + } }
--- a/src/coordinate.js +++ b/src/coordinate.js @@ ... @@ - // if there are more next(s) than alternate coords set nextCoord to undefined - // because we have reached the end of the path - // or - // fromCoord will be undefined if at the end of the alternate path - // so just continue to be undefined - if(alternate.includes(fromCoord) && is(fromCoord, alternate.last()) || isUndefined(fromCoord)) { - // Do not set nextCoord - } - // if fromCoord is at the end of the path or not on the path - // reset to to the beginning - else if (is(fromCoord, p.last()) || !p.includes(fromCoord)) { - nextCoord = p.get(0); - } - // else progress on to the next coord on the path. - else { - nextCoord = p.get(p.indexOf(fromCoord) + 1); + // Only allow the nextCoord to be set if we are not at the end of the + // alternate path and fromCoord is not undefined + // otherwise nextCoord will be undefined + if(!is(fromCoord, alternate.last()) && !isUndefined(fromCoord)) { + // if fromCoord is at the end of the path or not on the path + // reset to the beginning + if (is(fromCoord, p.last()) || !p.includes(fromCoord)) { + nextCoord = p.get(0); + } + // else progress on to the next coord on the path. + else { + nextCoord = p.get(p.indexOf(fromCoord) + 1); + } }
--- a/src/coordinate.js +++ b/src/coordinate.js @@ -23,18 +23,15 @@ CON DEL // if there are more next(s) than alternate coords set nextCoord to undefined DEL // because we have reached the end of the path DEL // or DEL // fromCoord will be undefined if at the end of the alternate path DEL // so just continue to be undefined DEL if(alternate.includes(fromCoord) && is(fromCoord, alternate.last()) || isUndefined(fromCoord)) { DEL // Do not set nextCoord DEL } DEL // if fromCoord is at the end of the path or not on the path DEL // reset to to the beginning DEL else if (is(fromCoord, p.last()) || !p.includes(fromCoord)) { DEL nextCoord = p.get(0); DEL } DEL // else progress on to the next coord on the path. DEL else { DEL nextCoord = p.get(p.indexOf(fromCoord) + 1); ADD // Only allow the nextCoord to be set if we are not at the end of the ADD // alternate path and fromCoord is not undefined ADD // otherwise nextCoord will be undefined ADD if(!is(fromCoord, alternate.last()) && !isUndefined(fromCoord)) { ADD // if fromCoord is at the end of the path or not on the path ADD // reset to the beginning ADD if (is(fromCoord, p.last()) || !p.includes(fromCoord)) { ADD nextCoord = p.get(0); ADD } ADD // else progress on to the next coord on the path. ADD else { ADD nextCoord = p.get(p.indexOf(fromCoord) + 1); ADD } CON }
<<<<<<< SEARCH } // if there are more next(s) than alternate coords set nextCoord to undefined // because we have reached the end of the path // or // fromCoord will be undefined if at the end of the alternate path // so just continue to be undefined if(alternate.includes(fromCoord) && is(fromCoord, alternate.last()) || isUndefined(fromCoord)) { // Do not set nextCoord } // if fromCoord is at the end of the path or not on the path // reset to to the beginning else if (is(fromCoord, p.last()) || !p.includes(fromCoord)) { nextCoord = p.get(0); } // else progress on to the next coord on the path. else { nextCoord = p.get(p.indexOf(fromCoord) + 1); } ======= } // Only allow the nextCoord to be set if we are not at the end of the // alternate path and fromCoord is not undefined // otherwise nextCoord will be undefined if(!is(fromCoord, alternate.last()) && !isUndefined(fromCoord)) { // if fromCoord is at the end of the path or not on the path // reset to the beginning if (is(fromCoord, p.last()) || !p.includes(fromCoord)) { nextCoord = p.get(0); } // else progress on to the next coord on the path. else { nextCoord = p.get(p.indexOf(fromCoord) + 1); } } >>>>>>> REPLACE
yuttie/tilda
46a4e2d0b96b42be2d2f31b5a14863891f093f26
src/main.rs
rust
mit
Add functions to load dataset
fn main() {
    println!("Hello, world!");
}
use std::string::String;
use std::collections::HashMap;
use std::vec::Vec;
use std::fs::{File};
use std::env;
use std::path::{Path};
use std::io::{self, BufReader, BufRead};


type Bag = HashMap<usize, usize>;

fn load_bags<P: AsRef<Path>>(path: P) -> io::Result<(Vec<usize>, Vec<Bag>)> {
    let mut bags = Vec::new();
    let mut labels = Vec::new();
    let file = try!(File::open(path));
    let file = BufReader::new(file);
    for line in file.lines() {
        let line = line.unwrap();
        let mut iter = line.split_whitespace();
        let label = iter.next().unwrap().parse::<usize>().unwrap();
        let mut bag = Bag::new();
        for elm in iter {
            let mut iter = elm.split(':');
            let index = iter.next().unwrap().parse::<usize>().unwrap();
            let value = iter.next().unwrap().parse::<usize>().unwrap();
            bag.insert(index, value);
        }
        labels.push(label);
        bags.push(bag);
    }
    Ok((labels, bags))
}

fn load_text_vocabulary<P: AsRef<Path>>(path: P) -> io::Result<Vec<String>> {
    let mut vocab = Vec::new();
    let file = try!(File::open(path));
    let file = BufReader::new(file);
    for line in file.lines() {
        let line = line.unwrap();
        vocab.push(line);
    }
    Ok(vocab)
}

fn main() {
    let args: Vec<String> = env::args().collect();
    let (labels, dataset) = load_bags(&args[1]).unwrap();
    let vocab: Option<Vec<String>> = if args.len() < 3 {
        None
    }
    else {
        Some(load_text_vocabulary(&args[2]).unwrap())
    };
    println!("{:?}", labels);
    println!("{:?}", dataset);
    println!("{:?}", vocab);
}
55
1
1
mixed
--- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,57 @@ +use std::string::String; +use std::collections::HashMap; +use std::vec::Vec; +use std::fs::{File}; +use std::env; +use std::path::{Path}; +use std::io::{self, BufReader, BufRead}; + + +type Bag = HashMap<usize, usize>; + +fn load_bags<P: AsRef<Path>>(path: P) -> io::Result<(Vec<usize>, Vec<Bag>)> { + let mut bags = Vec::new(); + let mut labels = Vec::new(); + let file = try!(File::open(path)); + let file = BufReader::new(file); + for line in file.lines() { + let line = line.unwrap(); + let mut iter = line.split_whitespace(); + let label = iter.next().unwrap().parse::<usize>().unwrap(); + let mut bag = Bag::new(); + for elm in iter { + let mut iter = elm.split(':'); + let index = iter.next().unwrap().parse::<usize>().unwrap(); + let value = iter.next().unwrap().parse::<usize>().unwrap(); + bag.insert(index, value); + } + labels.push(label); + bags.push(bag); + } + Ok((labels, bags)) +} + +fn load_text_vocabulary<P: AsRef<Path>>(path: P) -> io::Result<Vec<String>> { + let mut vocab = Vec::new(); + let file = try!(File::open(path)); + let file = BufReader::new(file); + for line in file.lines() { + let line = line.unwrap(); + vocab.push(line); + } + Ok(vocab) +} + fn main() { - println!("Hello, world!"); + let args: Vec<String> = env::args().collect(); + let (labels, dataset) = load_bags(&args[1]).unwrap(); + let vocab: Option<Vec<String>> = if args.len() < 3 { + None + } + else { + Some(load_text_vocabulary(&args[2]).unwrap()) + }; + println!("{:?}", labels); + println!("{:?}", dataset); + println!("{:?}", vocab); }
--- a/src/main.rs +++ b/src/main.rs @@ ... @@ +use std::string::String; +use std::collections::HashMap; +use std::vec::Vec; +use std::fs::{File}; +use std::env; +use std::path::{Path}; +use std::io::{self, BufReader, BufRead}; + + +type Bag = HashMap<usize, usize>; + +fn load_bags<P: AsRef<Path>>(path: P) -> io::Result<(Vec<usize>, Vec<Bag>)> { + let mut bags = Vec::new(); + let mut labels = Vec::new(); + let file = try!(File::open(path)); + let file = BufReader::new(file); + for line in file.lines() { + let line = line.unwrap(); + let mut iter = line.split_whitespace(); + let label = iter.next().unwrap().parse::<usize>().unwrap(); + let mut bag = Bag::new(); + for elm in iter { + let mut iter = elm.split(':'); + let index = iter.next().unwrap().parse::<usize>().unwrap(); + let value = iter.next().unwrap().parse::<usize>().unwrap(); + bag.insert(index, value); + } + labels.push(label); + bags.push(bag); + } + Ok((labels, bags)) +} + +fn load_text_vocabulary<P: AsRef<Path>>(path: P) -> io::Result<Vec<String>> { + let mut vocab = Vec::new(); + let file = try!(File::open(path)); + let file = BufReader::new(file); + for line in file.lines() { + let line = line.unwrap(); + vocab.push(line); + } + Ok(vocab) +} + fn main() { - println!("Hello, world!"); + let args: Vec<String> = env::args().collect(); + let (labels, dataset) = load_bags(&args[1]).unwrap(); + let vocab: Option<Vec<String>> = if args.len() < 3 { + None + } + else { + Some(load_text_vocabulary(&args[2]).unwrap()) + }; + println!("{:?}", labels); + println!("{:?}", dataset); + println!("{:?}", vocab); }
--- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,57 @@ ADD use std::string::String; ADD use std::collections::HashMap; ADD use std::vec::Vec; ADD use std::fs::{File}; ADD use std::env; ADD use std::path::{Path}; ADD use std::io::{self, BufReader, BufRead}; ADD ADD ADD type Bag = HashMap<usize, usize>; ADD ADD fn load_bags<P: AsRef<Path>>(path: P) -> io::Result<(Vec<usize>, Vec<Bag>)> { ADD let mut bags = Vec::new(); ADD let mut labels = Vec::new(); ADD let file = try!(File::open(path)); ADD let file = BufReader::new(file); ADD for line in file.lines() { ADD let line = line.unwrap(); ADD let mut iter = line.split_whitespace(); ADD let label = iter.next().unwrap().parse::<usize>().unwrap(); ADD let mut bag = Bag::new(); ADD for elm in iter { ADD let mut iter = elm.split(':'); ADD let index = iter.next().unwrap().parse::<usize>().unwrap(); ADD let value = iter.next().unwrap().parse::<usize>().unwrap(); ADD bag.insert(index, value); ADD } ADD labels.push(label); ADD bags.push(bag); ADD } ADD Ok((labels, bags)) ADD } ADD ADD fn load_text_vocabulary<P: AsRef<Path>>(path: P) -> io::Result<Vec<String>> { ADD let mut vocab = Vec::new(); ADD let file = try!(File::open(path)); ADD let file = BufReader::new(file); ADD for line in file.lines() { ADD let line = line.unwrap(); ADD vocab.push(line); ADD } ADD Ok(vocab) ADD } ADD CON fn main() { DEL println!("Hello, world!"); ADD let args: Vec<String> = env::args().collect(); ADD let (labels, dataset) = load_bags(&args[1]).unwrap(); ADD let vocab: Option<Vec<String>> = if args.len() < 3 { ADD None ADD } ADD else { ADD Some(load_text_vocabulary(&args[2]).unwrap()) ADD }; ADD println!("{:?}", labels); ADD println!("{:?}", dataset); ADD println!("{:?}", vocab); CON }
<<<<<<< SEARCH fn main() { println!("Hello, world!"); } ======= use std::string::String; use std::collections::HashMap; use std::vec::Vec; use std::fs::{File}; use std::env; use std::path::{Path}; use std::io::{self, BufReader, BufRead}; type Bag = HashMap<usize, usize>; fn load_bags<P: AsRef<Path>>(path: P) -> io::Result<(Vec<usize>, Vec<Bag>)> { let mut bags = Vec::new(); let mut labels = Vec::new(); let file = try!(File::open(path)); let file = BufReader::new(file); for line in file.lines() { let line = line.unwrap(); let mut iter = line.split_whitespace(); let label = iter.next().unwrap().parse::<usize>().unwrap(); let mut bag = Bag::new(); for elm in iter { let mut iter = elm.split(':'); let index = iter.next().unwrap().parse::<usize>().unwrap(); let value = iter.next().unwrap().parse::<usize>().unwrap(); bag.insert(index, value); } labels.push(label); bags.push(bag); } Ok((labels, bags)) } fn load_text_vocabulary<P: AsRef<Path>>(path: P) -> io::Result<Vec<String>> { let mut vocab = Vec::new(); let file = try!(File::open(path)); let file = BufReader::new(file); for line in file.lines() { let line = line.unwrap(); vocab.push(line); } Ok(vocab) } fn main() { let args: Vec<String> = env::args().collect(); let (labels, dataset) = load_bags(&args[1]).unwrap(); let vocab: Option<Vec<String>> = if args.len() < 3 { None } else { Some(load_text_vocabulary(&args[2]).unwrap()) }; println!("{:?}", labels); println!("{:?}", dataset); println!("{:?}", vocab); } >>>>>>> REPLACE
stefanw/froide
ad477285f4458145bca378b74dcb8cfe3abeaf06
froide/bounce/apps.py
python
mit
Add unsubscribe reference to mails through context
import json from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class BounceConfig(AppConfig): name = 'froide.bounce' verbose_name = _('Bounce') def ready(self): from froide.account import account_canceled from froide.account.export import registry account_canceled.connect(cancel_user) registry.register(export_user_data) def cancel_user(sender, user=None, **kwargs): from .models import Bounce if user is None: return Bounce.objects.filter(user=user).delete() def export_user_data(user): from .models import Bounce bounces = Bounce.objects.filter(user=user) if not bounces: return yield ('bounces.json', json.dumps([ { 'last_update': ( b.last_update.isoformat() if b.last_update else None ), 'bounces': b.bounces, 'email': b.email, } for b in bounces]).encode('utf-8') )
import json from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class BounceConfig(AppConfig): name = 'froide.bounce' verbose_name = _('Bounce') def ready(self): from froide.account import account_canceled from froide.account.export import registry from froide.helper.email_sending import mail_middleware_registry account_canceled.connect(cancel_user) registry.register(export_user_data) mail_middleware_registry.register(UnsubscribeReferenceMailMiddleware()) def cancel_user(sender, user=None, **kwargs): from .models import Bounce if user is None: return Bounce.objects.filter(user=user).delete() def export_user_data(user): from .models import Bounce bounces = Bounce.objects.filter(user=user) if not bounces: return yield ('bounces.json', json.dumps([ { 'last_update': ( b.last_update.isoformat() if b.last_update else None ), 'bounces': b.bounces, 'email': b.email, } for b in bounces]).encode('utf-8') ) class UnsubscribeReferenceMailMiddleware: ''' Moves unsubscribe_reference from mail render context to email sending kwargs ''' def enhance_email_kwargs(self, mail_intent, context, email_kwargs): unsubscribe_reference = context.get('unsubscribe_reference') if unsubscribe_reference is None: return return { 'unsubscribe_reference': unsubscribe_reference }
17
0
2
add_only
--- a/froide/bounce/apps.py +++ b/froide/bounce/apps.py @@ -14,4 +14,7 @@ + from froide.helper.email_sending import mail_middleware_registry + account_canceled.connect(cancel_user) registry.register(export_user_data) + mail_middleware_registry.register(UnsubscribeReferenceMailMiddleware()) @@ -42 +45,15 @@ ) + + +class UnsubscribeReferenceMailMiddleware: + ''' + Moves unsubscribe_reference from mail render context + to email sending kwargs + ''' + def enhance_email_kwargs(self, mail_intent, context, email_kwargs): + unsubscribe_reference = context.get('unsubscribe_reference') + if unsubscribe_reference is None: + return + return { + 'unsubscribe_reference': unsubscribe_reference + }
--- a/froide/bounce/apps.py +++ b/froide/bounce/apps.py @@ ... @@ + from froide.helper.email_sending import mail_middleware_registry + account_canceled.connect(cancel_user) registry.register(export_user_data) + mail_middleware_registry.register(UnsubscribeReferenceMailMiddleware()) @@ ... @@ ) + + +class UnsubscribeReferenceMailMiddleware: + ''' + Moves unsubscribe_reference from mail render context + to email sending kwargs + ''' + def enhance_email_kwargs(self, mail_intent, context, email_kwargs): + unsubscribe_reference = context.get('unsubscribe_reference') + if unsubscribe_reference is None: + return + return { + 'unsubscribe_reference': unsubscribe_reference + }
--- a/froide/bounce/apps.py +++ b/froide/bounce/apps.py @@ -14,4 +14,7 @@ CON ADD from froide.helper.email_sending import mail_middleware_registry ADD CON account_canceled.connect(cancel_user) CON registry.register(export_user_data) ADD mail_middleware_registry.register(UnsubscribeReferenceMailMiddleware()) CON @@ -42 +45,15 @@ CON ) ADD ADD ADD class UnsubscribeReferenceMailMiddleware: ADD ''' ADD Moves unsubscribe_reference from mail render context ADD to email sending kwargs ADD ''' ADD def enhance_email_kwargs(self, mail_intent, context, email_kwargs): ADD unsubscribe_reference = context.get('unsubscribe_reference') ADD if unsubscribe_reference is None: ADD return ADD return { ADD 'unsubscribe_reference': unsubscribe_reference ADD }
<<<<<<< SEARCH from froide.account.export import registry account_canceled.connect(cancel_user) registry.register(export_user_data) ======= from froide.account.export import registry from froide.helper.email_sending import mail_middleware_registry account_canceled.connect(cancel_user) registry.register(export_user_data) mail_middleware_registry.register(UnsubscribeReferenceMailMiddleware()) >>>>>>> REPLACE <<<<<<< SEARCH for b in bounces]).encode('utf-8') ) ======= for b in bounces]).encode('utf-8') ) class UnsubscribeReferenceMailMiddleware: ''' Moves unsubscribe_reference from mail render context to email sending kwargs ''' def enhance_email_kwargs(self, mail_intent, context, email_kwargs): unsubscribe_reference = context.get('unsubscribe_reference') if unsubscribe_reference is None: return return { 'unsubscribe_reference': unsubscribe_reference } >>>>>>> REPLACE
rust-gnome/gdk
f0d043ee0a5d6544172f2159388af1b26f14ccb3
src/event_scroll.rs
rust
mit
Add missing getters for EventScroll
// Copyright 2016, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>

use glib::translate::*;

#[derive(Clone, Debug)]
pub struct EventScroll(::Event);

event_wrapper!(EventScroll, GdkEventScroll);
event_subtype!(EventScroll, Scroll);

impl EventScroll {
    pub fn get_time(&self) -> u32 {
        self.as_ref().time
    }

    pub fn get_position(&self) -> (f64, f64) {
        let x = self.as_ref().x;
        let y = self.as_ref().y;
        (x, y)
    }

    pub fn get_state(&self) -> ::ModifierType {
        from_glib(self.as_ref().state)
    }

    pub fn get_device(&self) -> Option<::Device> {
        unsafe { from_glib_none(self.as_ref().device) }
    }

    pub fn get_delta(&self) -> (f64, f64) {
        let dx = self.as_ref().delta_x;
        let dy = self.as_ref().delta_y;
        (dx, dy)
    }
}
// Copyright 2016, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>

use glib::translate::*;

#[derive(Clone, Debug)]
pub struct EventScroll(::Event);

event_wrapper!(EventScroll, GdkEventScroll);
event_subtype!(EventScroll, Scroll);

impl EventScroll {
    pub fn get_time(&self) -> u32 {
        self.as_ref().time
    }

    pub fn get_position(&self) -> (f64, f64) {
        let x = self.as_ref().x;
        let y = self.as_ref().y;
        (x, y)
    }

    pub fn get_state(&self) -> ::ModifierType {
        from_glib(self.as_ref().state)
    }

    pub fn get_device(&self) -> Option<::Device> {
        unsafe { from_glib_none(self.as_ref().device) }
    }

    pub fn get_direction(&self) -> ::ScrollDirection {
        from_glib(self.as_ref().direction)
    }

    pub fn get_root(&self) -> (f64, f64) {
        let x_root = self.as_ref().x_root;
        let y_root = self.as_ref().y_root;
        (x_root, y_root)
    }

    pub fn get_delta(&self) -> (f64, f64) {
        let dx = self.as_ref().delta_x;
        let dy = self.as_ref().delta_y;
        (dx, dy)
    }
}
10
0
1
add_only
--- a/src/event_scroll.rs
+++ b/src/event_scroll.rs
@@ -31,2 +31,12 @@
 
+    pub fn get_direction(&self) -> ::ScrollDirection {
+        from_glib(self.as_ref().direction)
+    }
+
+    pub fn get_root(&self) -> (f64, f64) {
+        let x_root = self.as_ref().x_root;
+        let y_root = self.as_ref().y_root;
+        (x_root, y_root)
+    }
+
     pub fn get_delta(&self) -> (f64, f64) {
--- a/src/event_scroll.rs
+++ b/src/event_scroll.rs
@@ ... @@
 
+    pub fn get_direction(&self) -> ::ScrollDirection {
+        from_glib(self.as_ref().direction)
+    }
+
+    pub fn get_root(&self) -> (f64, f64) {
+        let x_root = self.as_ref().x_root;
+        let y_root = self.as_ref().y_root;
+        (x_root, y_root)
+    }
+
     pub fn get_delta(&self) -> (f64, f64) {
--- a/src/event_scroll.rs
+++ b/src/event_scroll.rs
@@ -31,2 +31,12 @@
CON 
ADD     pub fn get_direction(&self) -> ::ScrollDirection {
ADD         from_glib(self.as_ref().direction)
ADD     }
ADD 
ADD     pub fn get_root(&self) -> (f64, f64) {
ADD         let x_root = self.as_ref().x_root;
ADD         let y_root = self.as_ref().y_root;
ADD         (x_root, y_root)
ADD     }
ADD 
CON     pub fn get_delta(&self) -> (f64, f64) {
<<<<<<< SEARCH
    }

    pub fn get_delta(&self) -> (f64, f64) {
        let dx = self.as_ref().delta_x;
=======
    }

    pub fn get_direction(&self) -> ::ScrollDirection {
        from_glib(self.as_ref().direction)
    }

    pub fn get_root(&self) -> (f64, f64) {
        let x_root = self.as_ref().x_root;
        let y_root = self.as_ref().y_root;
        (x_root, y_root)
    }

    pub fn get_delta(&self) -> (f64, f64) {
        let dx = self.as_ref().delta_x;
>>>>>>> REPLACE
bbqsrc/kbdi
419d81c18a9ce5b7c4b765cf7741abeb1f490f01
src/platform/winnls.rs
rust
isc
Make win8 fix coexist with win10
use std::io;

use winapi::ctypes::c_int;
use winapi::um::winnls as sys_winnls;
use winrust::{from_wide_string, to_wide_string};

const MAX_LOCALE_NAME_LEN: usize = 85usize;

// TODO: remove panics
pub fn resolve_locale_name(tag: &str) -> Option<String> {
    let mut buf = vec![0u16; MAX_LOCALE_NAME_LEN];

    let ret = unsafe {
        sys_winnls::ResolveLocaleName(
            to_wide_string(tag).as_ptr(),
            buf.as_mut_ptr(),
            MAX_LOCALE_NAME_LEN as c_int
        )
    };

    if ret == 0 {
        let err = io::Error::last_os_error();
        println!("{:?}", err);
        panic!();
    }

    buf.truncate(ret as usize - 1);

    if buf.len() == 0 {
        return None;
    }
    Some(from_wide_string(&buf).unwrap())
}

pub fn locale_name_to_lcid(locale_name: &str) -> Result<u32, io::Error> {
    let tag = resolve_locale_name(locale_name).unwrap();

    let ret = unsafe {
        sys_winnls::LocaleNameToLCID(to_wide_string(&tag).as_ptr(), 0)
    };

    match ret {
        0 => Err(io::Error::last_os_error()),
        _ => Ok(ret)
    }
}
use std::io;

use winapi::ctypes::c_int;
use winapi::um::winnls as sys_winnls;
use winrust::{from_wide_string, to_wide_string};

const MAX_LOCALE_NAME_LEN: usize = 85usize;

// TODO: remove panics
pub fn resolve_locale_name(tag: &str) -> Option<String> {
    let mut buf = vec![0u16; MAX_LOCALE_NAME_LEN];

    let ret = unsafe {
        sys_winnls::ResolveLocaleName(
            to_wide_string(tag).as_ptr(),
            buf.as_mut_ptr(),
            MAX_LOCALE_NAME_LEN as c_int
        )
    };

    if ret == 0 {
        let err = io::Error::last_os_error();
        println!("{:?}", err);
        panic!();
    }

    buf.truncate(ret as usize - 1);

    if buf.len() == 0 {
        return None;
    }
    Some(from_wide_string(&buf).unwrap())
}

pub fn locale_name_to_lcid(locale_name: &str) -> Result<u32, io::Error> {
    let tag = resolve_locale_name(locale_name)
        .unwrap_or(locale_name.to_owned());

    let ret = unsafe {
        sys_winnls::LocaleNameToLCID(to_wide_string(&tag).as_ptr(), 0)
    };

    match ret {
        0 => Err(io::Error::last_os_error()),
        _ => Ok(ret)
    }
}
2
1
1
mixed
--- a/src/platform/winnls.rs
+++ b/src/platform/winnls.rs
@@ -35,3 +35,4 @@
 pub fn locale_name_to_lcid(locale_name: &str) -> Result<u32, io::Error> {
-    let tag = resolve_locale_name(locale_name).unwrap();
+    let tag = resolve_locale_name(locale_name)
+        .unwrap_or(locale_name.to_owned());
 
--- a/src/platform/winnls.rs
+++ b/src/platform/winnls.rs
@@ ... @@
 pub fn locale_name_to_lcid(locale_name: &str) -> Result<u32, io::Error> {
-    let tag = resolve_locale_name(locale_name).unwrap();
+    let tag = resolve_locale_name(locale_name)
+        .unwrap_or(locale_name.to_owned());
 
--- a/src/platform/winnls.rs
+++ b/src/platform/winnls.rs
@@ -35,3 +35,4 @@
CON pub fn locale_name_to_lcid(locale_name: &str) -> Result<u32, io::Error> {
DEL     let tag = resolve_locale_name(locale_name).unwrap();
ADD     let tag = resolve_locale_name(locale_name)
ADD         .unwrap_or(locale_name.to_owned());
CON 
<<<<<<< SEARCH
pub fn locale_name_to_lcid(locale_name: &str) -> Result<u32, io::Error> {
    let tag = resolve_locale_name(locale_name).unwrap();

    let ret = unsafe {
=======
pub fn locale_name_to_lcid(locale_name: &str) -> Result<u32, io::Error> {
    let tag = resolve_locale_name(locale_name)
        .unwrap_or(locale_name.to_owned());

    let ret = unsafe {
>>>>>>> REPLACE
Shiroy/lalr_gen
80c0ec7ac4045c8a7fe60020ffcb4a51788c5f92
src/generator.rs
rust
mit
Write the generated code into a file
extern crate liquid; use grammar::{LexicalUnit, ProductionRule, RuleComponent, Grammar}; use self::liquid::{Renderable, Context, Value, FilterError}; static template : &'static str = include_str!("parser.liquid"); pub fn generate(grammar: Grammar) { let tmplt = liquid::parse(template, Default::default()).unwrap(); let mut ctx = Context::new(); ctx.add_filter("capitalize", Box::new(|input, _args| { if let &Value::Str(ref s) = input { let res = s.chars().enumerate().map(|(i, c)| if i == 0 { c.to_uppercase().next().unwrap() } else { c.to_lowercase().next().unwrap() }).collect(); Ok(Value::Str(res)) } else { Err(FilterError::InvalidType("Expected a string".to_owned())) } })); ctx.set_val("production_rules", Value::Array(grammar.get_all_production_rules_name() .iter() .map(|x| Value::Str(x.clone())) .collect())); match tmplt.render(&mut ctx) { Err(msg) => println!("Error : {}", msg), Ok(generated_code) => save_generated_code(generated_code.unwrap()), } } pub fn save_generated_code(code : String) { println!("{}", code); }
extern crate liquid; use grammar::{LexicalUnit, ProductionRule, RuleComponent, Grammar}; use self::liquid::{Renderable, Context, Value, FilterError}; use std::fs::File; use std::io; use std::io::Write; static template : &'static str = include_str!("parser.liquid"); pub fn generate(grammar: Grammar) { let tmplt = liquid::parse(template, Default::default()).unwrap(); let mut ctx = Context::new(); ctx.add_filter("capitalize", Box::new(|input, _args| { if let &Value::Str(ref s) = input { let res = s.chars().enumerate().map(|(i, c)| if i == 0 { c.to_uppercase().next().unwrap() } else { c.to_lowercase().next().unwrap() }).collect(); Ok(Value::Str(res)) } else { Err(FilterError::InvalidType("Expected a string".to_owned())) } })); ctx.set_val("production_rules", Value::Array(grammar.get_all_production_rules_name() .iter() .map(|x| Value::Str(x.clone())) .collect())); match tmplt.render(&mut ctx) { Err(msg) => println!("Error : {}", msg), Ok(generated_code) => { match save_generated_code(generated_code.unwrap()) { Ok(()) => {}, Err(err) => println!("Error : {}", err), } }, } } pub fn save_generated_code(code : String) -> io::Result<()> { let mut f = try!(File::create("out.rs")); try!(f.write_all(code.as_bytes())); Ok(()) }
14
3
3
mixed
--- a/src/generator.rs +++ b/src/generator.rs @@ -4,2 +4,5 @@ use self::liquid::{Renderable, Context, Value, FilterError}; +use std::fs::File; +use std::io; +use std::io::Write; @@ -33,3 +36,8 @@ Err(msg) => println!("Error : {}", msg), - Ok(generated_code) => save_generated_code(generated_code.unwrap()), + Ok(generated_code) => { + match save_generated_code(generated_code.unwrap()) { + Ok(()) => {}, + Err(err) => println!("Error : {}", err), + } + }, } @@ -37,4 +45,7 @@ -pub fn save_generated_code(code : String) { - println!("{}", code); +pub fn save_generated_code(code : String) -> io::Result<()> { + let mut f = try!(File::create("out.rs")); + try!(f.write_all(code.as_bytes())); + + Ok(()) }
--- a/src/generator.rs +++ b/src/generator.rs @@ ... @@ use self::liquid::{Renderable, Context, Value, FilterError}; +use std::fs::File; +use std::io; +use std::io::Write; @@ ... @@ Err(msg) => println!("Error : {}", msg), - Ok(generated_code) => save_generated_code(generated_code.unwrap()), + Ok(generated_code) => { + match save_generated_code(generated_code.unwrap()) { + Ok(()) => {}, + Err(err) => println!("Error : {}", err), + } + }, } @@ ... @@ -pub fn save_generated_code(code : String) { - println!("{}", code); +pub fn save_generated_code(code : String) -> io::Result<()> { + let mut f = try!(File::create("out.rs")); + try!(f.write_all(code.as_bytes())); + + Ok(()) }
--- a/src/generator.rs +++ b/src/generator.rs @@ -4,2 +4,5 @@ CON use self::liquid::{Renderable, Context, Value, FilterError}; ADD use std::fs::File; ADD use std::io; ADD use std::io::Write; CON @@ -33,3 +36,8 @@ CON Err(msg) => println!("Error : {}", msg), DEL Ok(generated_code) => save_generated_code(generated_code.unwrap()), ADD Ok(generated_code) => { ADD match save_generated_code(generated_code.unwrap()) { ADD Ok(()) => {}, ADD Err(err) => println!("Error : {}", err), ADD } ADD }, CON } @@ -37,4 +45,7 @@ CON DEL pub fn save_generated_code(code : String) { DEL println!("{}", code); ADD pub fn save_generated_code(code : String) -> io::Result<()> { ADD let mut f = try!(File::create("out.rs")); ADD try!(f.write_all(code.as_bytes())); ADD ADD Ok(()) CON }
<<<<<<< SEARCH use grammar::{LexicalUnit, ProductionRule, RuleComponent, Grammar}; use self::liquid::{Renderable, Context, Value, FilterError}; static template : &'static str = include_str!("parser.liquid"); ======= use grammar::{LexicalUnit, ProductionRule, RuleComponent, Grammar}; use self::liquid::{Renderable, Context, Value, FilterError}; use std::fs::File; use std::io; use std::io::Write; static template : &'static str = include_str!("parser.liquid"); >>>>>>> REPLACE <<<<<<< SEARCH match tmplt.render(&mut ctx) { Err(msg) => println!("Error : {}", msg), Ok(generated_code) => save_generated_code(generated_code.unwrap()), } } pub fn save_generated_code(code : String) { println!("{}", code); } ======= match tmplt.render(&mut ctx) { Err(msg) => println!("Error : {}", msg), Ok(generated_code) => { match save_generated_code(generated_code.unwrap()) { Ok(()) => {}, Err(err) => println!("Error : {}", err), } }, } } pub fn save_generated_code(code : String) -> io::Result<()> { let mut f = try!(File::create("out.rs")); try!(f.write_all(code.as_bytes())); Ok(()) } >>>>>>> REPLACE
gnuvince/ppbert
5517e47b17f10227deba6d543c9468362d064364
src/main.rs
rust
mit
Add support for a few base types

- small_integer
- integer
- atom
#[macro_use] extern crate nom; use std::io; use std::io::Read; static BERT_MAGIC_NUMBER: u8 = 131; #[derive(Debug)] enum BertTerm { Int(i32), Float(f64), Atom(String), Tuple(Vec<BertTerm>), List(Vec<BertTerm>), Binary(Vec<u8>) } #[derive(Debug)] enum BertError { InvalidMagicNumber } fn main() { }
#[macro_use] extern crate nom; use std::io; use std::io::Read; use nom::{IResult, ErrorKind}; static BERT_MAGIC_NUMBER: u8 = 131; static SMALL_INTEGER_EXT: u8 = 97; static INTEGER_EXT: u8 = 98; static ATOM_EXT: u8 = 100; #[derive(Debug)] enum BertTerm { Int(i32), Float(f64), Atom(String), Tuple(Vec<BertTerm>), List(Vec<BertTerm>), Binary(Vec<u8>) } #[derive(Debug)] enum BertError { Unknown, InvalidMagicNumber } named!(bert_magic_number, tag!([BERT_MAGIC_NUMBER])); fn small_integer(i0: &[u8]) -> IResult<&[u8], BertTerm> { let (i1, _) = try_parse!(i0, tag!([SMALL_INTEGER_EXT])); let (i2, n) = try_parse!(i1, nom::be_u8); IResult::Done(i2, BertTerm::Int(n as i32)) } fn integer(i0: &[u8]) -> IResult<&[u8], BertTerm> { let (i1, _) = try_parse!(i0, tag!([INTEGER_EXT])); let (i2, n) = try_parse!(i1, nom::be_i32); IResult::Done(i2, BertTerm::Int(n)) } fn atom(i0: &[u8]) -> IResult<&[u8], BertTerm> { let (i1, _) = try_parse!(i0, tag!([ATOM_EXT])); let (i2, len) = try_parse!(i1, nom::be_u16); let (i3, atom_name) = try_parse!(i2, take_str!(len)); IResult::Done(i3, BertTerm::Atom(atom_name.to_string())) } named!(parse_bert<&[u8], BertTerm>, chain!( bert_magic_number ~ t: alt!(small_integer | integer | atom) , || { t } )); fn main() { let mut stdin = io::stdin(); let mut buf: Vec<u8> = Vec::new(); stdin.read_to_end(&mut buf); println!("{:?}", parse_bert(&buf[..])); }
42
0
3
add_only
--- a/src/main.rs +++ b/src/main.rs @@ -6,3 +6,10 @@ +use nom::{IResult, ErrorKind}; + + static BERT_MAGIC_NUMBER: u8 = 131; + +static SMALL_INTEGER_EXT: u8 = 97; +static INTEGER_EXT: u8 = 98; +static ATOM_EXT: u8 = 100; @@ -20,2 +27,3 @@ enum BertError { + Unknown, InvalidMagicNumber @@ -23,3 +31,37 @@ +named!(bert_magic_number, tag!([BERT_MAGIC_NUMBER])); + +fn small_integer(i0: &[u8]) -> IResult<&[u8], BertTerm> { + let (i1, _) = try_parse!(i0, tag!([SMALL_INTEGER_EXT])); + let (i2, n) = try_parse!(i1, nom::be_u8); + IResult::Done(i2, BertTerm::Int(n as i32)) +} + + +fn integer(i0: &[u8]) -> IResult<&[u8], BertTerm> { + let (i1, _) = try_parse!(i0, tag!([INTEGER_EXT])); + let (i2, n) = try_parse!(i1, nom::be_i32); + IResult::Done(i2, BertTerm::Int(n)) +} + +fn atom(i0: &[u8]) -> IResult<&[u8], BertTerm> { + let (i1, _) = try_parse!(i0, tag!([ATOM_EXT])); + let (i2, len) = try_parse!(i1, nom::be_u16); + let (i3, atom_name) = try_parse!(i2, take_str!(len)); + IResult::Done(i3, BertTerm::Atom(atom_name.to_string())) +} + +named!(parse_bert<&[u8], BertTerm>, chain!( + bert_magic_number ~ + t: alt!(small_integer | integer | atom) + , + || { t } +)); + fn main() { + let mut stdin = io::stdin(); + let mut buf: Vec<u8> = Vec::new(); + + stdin.read_to_end(&mut buf); + println!("{:?}", parse_bert(&buf[..])); }
--- a/src/main.rs +++ b/src/main.rs @@ ... @@ +use nom::{IResult, ErrorKind}; + + static BERT_MAGIC_NUMBER: u8 = 131; + +static SMALL_INTEGER_EXT: u8 = 97; +static INTEGER_EXT: u8 = 98; +static ATOM_EXT: u8 = 100; @@ ... @@ enum BertError { + Unknown, InvalidMagicNumber @@ ... @@ +named!(bert_magic_number, tag!([BERT_MAGIC_NUMBER])); + +fn small_integer(i0: &[u8]) -> IResult<&[u8], BertTerm> { + let (i1, _) = try_parse!(i0, tag!([SMALL_INTEGER_EXT])); + let (i2, n) = try_parse!(i1, nom::be_u8); + IResult::Done(i2, BertTerm::Int(n as i32)) +} + + +fn integer(i0: &[u8]) -> IResult<&[u8], BertTerm> { + let (i1, _) = try_parse!(i0, tag!([INTEGER_EXT])); + let (i2, n) = try_parse!(i1, nom::be_i32); + IResult::Done(i2, BertTerm::Int(n)) +} + +fn atom(i0: &[u8]) -> IResult<&[u8], BertTerm> { + let (i1, _) = try_parse!(i0, tag!([ATOM_EXT])); + let (i2, len) = try_parse!(i1, nom::be_u16); + let (i3, atom_name) = try_parse!(i2, take_str!(len)); + IResult::Done(i3, BertTerm::Atom(atom_name.to_string())) +} + +named!(parse_bert<&[u8], BertTerm>, chain!( + bert_magic_number ~ + t: alt!(small_integer | integer | atom) + , + || { t } +)); + fn main() { + let mut stdin = io::stdin(); + let mut buf: Vec<u8> = Vec::new(); + + stdin.read_to_end(&mut buf); + println!("{:?}", parse_bert(&buf[..])); }
--- a/src/main.rs +++ b/src/main.rs @@ -6,3 +6,10 @@ CON ADD use nom::{IResult, ErrorKind}; ADD ADD CON static BERT_MAGIC_NUMBER: u8 = 131; ADD ADD static SMALL_INTEGER_EXT: u8 = 97; ADD static INTEGER_EXT: u8 = 98; ADD static ATOM_EXT: u8 = 100; CON @@ -20,2 +27,3 @@ CON enum BertError { ADD Unknown, CON InvalidMagicNumber @@ -23,3 +31,37 @@ CON ADD named!(bert_magic_number, tag!([BERT_MAGIC_NUMBER])); ADD ADD fn small_integer(i0: &[u8]) -> IResult<&[u8], BertTerm> { ADD let (i1, _) = try_parse!(i0, tag!([SMALL_INTEGER_EXT])); ADD let (i2, n) = try_parse!(i1, nom::be_u8); ADD IResult::Done(i2, BertTerm::Int(n as i32)) ADD } ADD ADD ADD fn integer(i0: &[u8]) -> IResult<&[u8], BertTerm> { ADD let (i1, _) = try_parse!(i0, tag!([INTEGER_EXT])); ADD let (i2, n) = try_parse!(i1, nom::be_i32); ADD IResult::Done(i2, BertTerm::Int(n)) ADD } ADD ADD fn atom(i0: &[u8]) -> IResult<&[u8], BertTerm> { ADD let (i1, _) = try_parse!(i0, tag!([ATOM_EXT])); ADD let (i2, len) = try_parse!(i1, nom::be_u16); ADD let (i3, atom_name) = try_parse!(i2, take_str!(len)); ADD IResult::Done(i3, BertTerm::Atom(atom_name.to_string())) ADD } ADD ADD named!(parse_bert<&[u8], BertTerm>, chain!( ADD bert_magic_number ~ ADD t: alt!(small_integer | integer | atom) ADD , ADD || { t } ADD )); ADD CON fn main() { ADD let mut stdin = io::stdin(); ADD let mut buf: Vec<u8> = Vec::new(); ADD ADD stdin.read_to_end(&mut buf); ADD println!("{:?}", parse_bert(&buf[..])); CON }
<<<<<<< SEARCH use std::io::Read; static BERT_MAGIC_NUMBER: u8 = 131; #[derive(Debug)] ======= use std::io::Read; use nom::{IResult, ErrorKind}; static BERT_MAGIC_NUMBER: u8 = 131; static SMALL_INTEGER_EXT: u8 = 97; static INTEGER_EXT: u8 = 98; static ATOM_EXT: u8 = 100; #[derive(Debug)] >>>>>>> REPLACE <<<<<<< SEARCH #[derive(Debug)] enum BertError { InvalidMagicNumber } fn main() { } ======= #[derive(Debug)] enum BertError { Unknown, InvalidMagicNumber } named!(bert_magic_number, tag!([BERT_MAGIC_NUMBER])); fn small_integer(i0: &[u8]) -> IResult<&[u8], BertTerm> { let (i1, _) = try_parse!(i0, tag!([SMALL_INTEGER_EXT])); let (i2, n) = try_parse!(i1, nom::be_u8); IResult::Done(i2, BertTerm::Int(n as i32)) } fn integer(i0: &[u8]) -> IResult<&[u8], BertTerm> { let (i1, _) = try_parse!(i0, tag!([INTEGER_EXT])); let (i2, n) = try_parse!(i1, nom::be_i32); IResult::Done(i2, BertTerm::Int(n)) } fn atom(i0: &[u8]) -> IResult<&[u8], BertTerm> { let (i1, _) = try_parse!(i0, tag!([ATOM_EXT])); let (i2, len) = try_parse!(i1, nom::be_u16); let (i3, atom_name) = try_parse!(i2, take_str!(len)); IResult::Done(i3, BertTerm::Atom(atom_name.to_string())) } named!(parse_bert<&[u8], BertTerm>, chain!( bert_magic_number ~ t: alt!(small_integer | integer | atom) , || { t } )); fn main() { let mut stdin = io::stdin(); let mut buf: Vec<u8> = Vec::new(); stdin.read_to_end(&mut buf); println!("{:?}", parse_bert(&buf[..])); } >>>>>>> REPLACE
kmcallister/rust
aa4fa2611c6d21d104b35f7e585fc32195a2d61d
src/test/run-pass/class-separate-impl.rs
rust
apache-2.0
Remove an xfail-fast, as this seems to work on Windows now
// xfail-fast
// (Not sure why, though -- FIXME (tjc)
import to_str::*;
import to_str::to_str;

class cat {
  priv {
    let mut meows : uint;
    fn meow() {
      #error("Meow");
      self.meows += 1u;
      if self.meows % 5u == 0u {
          self.how_hungry += 1;
      }
    }
  }

  let mut how_hungry : int;
  let name : str;

  new(in_x : uint, in_y : int, in_name: str)
    { self.meows = in_x; self.how_hungry = in_y; self.name = in_name; }

  fn speak() { self.meow(); }

  fn eat() -> bool {
    if self.how_hungry > 0 {
        #error("OM NOM NOM");
        self.how_hungry -= 2;
        ret true;
    }
    else {
        #error("Not hungry!");
        ret false;
    }
  }
}

impl of to_str for cat {
  fn to_str() -> str { self.name }
}

fn print_out<T: to_str>(thing: T, expected: str) {
  let actual = thing.to_str();
  #debug("%s", actual);
  assert(actual == expected);
}

fn main() {
  let nyan : to_str = cat(0u, 2, "nyan") as to_str;
  print_out(nyan, "nyan");
}
import to_str::*;
import to_str::to_str;

class cat {
  priv {
    let mut meows : uint;
    fn meow() {
      #error("Meow");
      self.meows += 1u;
      if self.meows % 5u == 0u {
          self.how_hungry += 1;
      }
    }
  }

  let mut how_hungry : int;
  let name : str;

  new(in_x : uint, in_y : int, in_name: str)
    { self.meows = in_x; self.how_hungry = in_y; self.name = in_name; }

  fn speak() { self.meow(); }

  fn eat() -> bool {
    if self.how_hungry > 0 {
        #error("OM NOM NOM");
        self.how_hungry -= 2;
        ret true;
    }
    else {
        #error("Not hungry!");
        ret false;
    }
  }
}

impl of to_str for cat {
  fn to_str() -> str { self.name }
}

fn print_out<T: to_str>(thing: T, expected: str) {
  let actual = thing.to_str();
  #debug("%s", actual);
  assert(actual == expected);
}

fn main() {
  let nyan : to_str = cat(0u, 2, "nyan") as to_str;
  print_out(nyan, "nyan");
}
0
2
1
del_only
--- a/src/test/run-pass/class-separate-impl.rs
+++ b/src/test/run-pass/class-separate-impl.rs
@@ -1,3 +1 @@
-// xfail-fast
-// (Not sure why, though -- FIXME (tjc)
 import to_str::*;
--- a/src/test/run-pass/class-separate-impl.rs
+++ b/src/test/run-pass/class-separate-impl.rs
@@ ... @@
-// xfail-fast
-// (Not sure why, though -- FIXME (tjc)
 import to_str::*;
--- a/src/test/run-pass/class-separate-impl.rs
+++ b/src/test/run-pass/class-separate-impl.rs
@@ -1,3 +1 @@
DEL // xfail-fast
DEL // (Not sure why, though -- FIXME (tjc)
CON import to_str::*;
<<<<<<< SEARCH
// xfail-fast
// (Not sure why, though -- FIXME (tjc)
import to_str::*;
import to_str::to_str;
=======
import to_str::*;
import to_str::to_str;
>>>>>>> REPLACE
GambitResearch/suponoff
8cf4651568eb83e3b754529675bfa22abcd5223a
setup.py
python
bsd-2-clause
Fix the Operating System classifier, it was invalid
from setuptools import setup, find_packages

from suponoff import __version__ as version


if __name__ == '__main__':
    with open("README.rst") as f:
        long_description = f.read()

    setup(
        name="suponoff",
        version=version,
        author="Gambit Research",
        author_email="[email protected]",
        description="An alternative Supervisor web interface.",
        long_description=long_description,
        license="BSD",
        url="https://github.com/GambitResearch/suponoff",
        zip_safe=False,
        include_package_data=True,
        packages=find_packages(),
        scripts=[
            'suponoff-monhelper.py'
        ],
        install_requires=[
            "Django >= 1.7", # just because I only tested with Django 1.7...
        ],
        classifiers=[
            "Development Status :: 4 - Beta",
            "Environment :: Web Environment",
            "Framework :: Django",
            "Intended Audience :: Developers",
            "License :: OSI Approved :: BSD License",
            "Operating System :: Linux",
            "Programming Language :: Python",
            "Programming Language :: Python :: 3",
            "Programming Language :: Python :: 3.3",
            "Topic :: Internet :: WWW/HTTP",
            "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
            "Topic :: Internet :: WWW/HTTP :: WSGI",
            ("Topic :: Software Development :: Libraries :: "
             "Application Frameworks"),
            "Topic :: Software Development :: Libraries :: Python Modules",
            "Topic :: System :: Systems Administration",
        ])
from setuptools import setup, find_packages

from suponoff import __version__ as version


if __name__ == '__main__':
    with open("README.rst") as f:
        long_description = f.read()

    setup(
        name="suponoff",
        version=version,
        author="Gambit Research",
        author_email="[email protected]",
        description="An alternative Supervisor web interface.",
        long_description=long_description,
        license="BSD",
        url="https://github.com/GambitResearch/suponoff",
        zip_safe=False,
        include_package_data=True,
        packages=find_packages(),
        scripts=[
            'suponoff-monhelper.py'
        ],
        install_requires=[
            "Django >= 1.7", # just because I only tested with Django 1.7...
        ],
        classifiers=[
            "Development Status :: 4 - Beta",
            "Environment :: Web Environment",
            "Framework :: Django",
            "Intended Audience :: Developers",
            "License :: OSI Approved :: BSD License",
            "Operating System :: OS Independent",
            "Programming Language :: Python",
            "Programming Language :: Python :: 3",
            "Programming Language :: Python :: 3.3",
            "Topic :: Internet :: WWW/HTTP",
            "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
            "Topic :: Internet :: WWW/HTTP :: WSGI",
            ("Topic :: Software Development :: Libraries :: "
             "Application Frameworks"),
            "Topic :: Software Development :: Libraries :: Python Modules",
            "Topic :: System :: Systems Administration",
        ])
1
1
1
mixed
--- a/setup.py
+++ b/setup.py
@@ -33,3 +33,3 @@
             "License :: OSI Approved :: BSD License",
-            "Operating System :: Linux",
+            "Operating System :: OS Independent",
             "Programming Language :: Python",
--- a/setup.py
+++ b/setup.py
@@ ... @@
             "License :: OSI Approved :: BSD License",
-            "Operating System :: Linux",
+            "Operating System :: OS Independent",
             "Programming Language :: Python",
--- a/setup.py
+++ b/setup.py
@@ -33,3 +33,3 @@
CON             "License :: OSI Approved :: BSD License",
DEL             "Operating System :: Linux",
ADD             "Operating System :: OS Independent",
CON             "Programming Language :: Python",
<<<<<<< SEARCH
            "Intended Audience :: Developers",
            "License :: OSI Approved :: BSD License",
            "Operating System :: Linux",
            "Programming Language :: Python",
            "Programming Language :: Python :: 3",
=======
            "Intended Audience :: Developers",
            "License :: OSI Approved :: BSD License",
            "Operating System :: OS Independent",
            "Programming Language :: Python",
            "Programming Language :: Python :: 3",
>>>>>>> REPLACE
tylerFowler/redux-remote-resource
17dd0366cc79c9cbcee2caed535d022218f6d79e
lib/middleware.js
javascript
mit
Add documentation & basic cache lookup function
/** Resource Middleware @description main definition of middleware @exports @default {function} remoteResourceMiddleware **/ const remoteResourceMiddleware = () => {}; export default remoteResourceMiddleware;
/** Resource Middleware @description main definition of middleware @exports @default {function} remoteResourceMiddleware **/ /* eslint no-unused-vars:0 */ import fetch from 'isomorphic-fetch'; import RemoteResource from './RemoteResource'; import * as utils from './utils'; /** @name remoteResourceMiddleware @desc provides a function that can be used to configure the middleware's behavior, returning valid Redux middleware @param {object} conf @param {object} injectedHeaders headers to be injected on every outgoing req @param {object} statusActions HTTP Status codes w/ corresponding types or functions that will be emitted when the status code is received @returns {function} ReduxMiddleware **/ export default function remoteResourceMiddleware(conf) { return store => next => action => { // if we don't find our symbol don't even bother if (action.hasOwnProperty && !action.hasOwnProperty(RemoteResource)) return next(action); const callOpts = action[RemoteResource]; store.dispatch(action); }; } /** @private @name cacheLookup @desc consults the given state cache mapping to see if we already have a value note that this will *not* run when using actionable HTTP methods like POST @param {string} method HTTP verb, used to determine if we should cache @param {function} cacheMapping @param {boolean} nocache @param {function} emitSuccessOnCacheHit @returns {Promise<boolean>} result @TODO allow `cacheMapping` to return a value that will be supplied to the success event if `emitSuccessOnCacheHit` is true after the whole thing short circuits **/ function cacheLookup(state, { method, cacheMapping, nocache }) { if (!utils.isCacheableRequest(method) || !cacheMapping || nocache) return Promise.resolve(false); return Promise.resolve(cacheMapping(state)); }
46
2
1
mixed
--- a/lib/middleware.js +++ b/lib/middleware.js @@ -6,4 +6,48 @@ -const remoteResourceMiddleware = () => {}; +/* eslint no-unused-vars:0 */ +import fetch from 'isomorphic-fetch'; +import RemoteResource from './RemoteResource'; +import * as utils from './utils'; -export default remoteResourceMiddleware; +/** + @name remoteResourceMiddleware + @desc provides a function that can be used to configure the middleware's + behavior, returning valid Redux middleware + @param {object} conf + @param {object} injectedHeaders headers to be injected on every outgoing req + @param {object} statusActions HTTP Status codes w/ corresponding types + or functions that will be emitted when the status code is received + @returns {function} ReduxMiddleware +**/ +export default function remoteResourceMiddleware(conf) { + return store => next => action => { + // if we don't find our symbol don't even bother + if (action.hasOwnProperty && !action.hasOwnProperty(RemoteResource)) + return next(action); + + const callOpts = action[RemoteResource]; + store.dispatch(action); + }; +} + +/** + @private + @name cacheLookup + @desc consults the given state cache mapping to see if we already have a value + note that this will *not* run when using actionable HTTP methods like POST + @param {string} method HTTP verb, used to determine if we should cache + @param {function} cacheMapping + @param {boolean} nocache + @param {function} emitSuccessOnCacheHit + @returns {Promise<boolean>} result + + @TODO allow `cacheMapping` to return a value that will be supplied to the + success event if `emitSuccessOnCacheHit` is true after the whole thing + short circuits +**/ +function cacheLookup(state, { method, cacheMapping, nocache }) { + if (!utils.isCacheableRequest(method) || !cacheMapping || nocache) + return Promise.resolve(false); + + return Promise.resolve(cacheMapping(state)); +}
--- a/lib/middleware.js +++ b/lib/middleware.js @@ ... @@ -const remoteResourceMiddleware = () => {}; +/* eslint no-unused-vars:0 */ +import fetch from 'isomorphic-fetch'; +import RemoteResource from './RemoteResource'; +import * as utils from './utils'; -export default remoteResourceMiddleware; +/** + @name remoteResourceMiddleware + @desc provides a function that can be used to configure the middleware's + behavior, returning valid Redux middleware + @param {object} conf + @param {object} injectedHeaders headers to be injected on every outgoing req + @param {object} statusActions HTTP Status codes w/ corresponding types + or functions that will be emitted when the status code is received + @returns {function} ReduxMiddleware +**/ +export default function remoteResourceMiddleware(conf) { + return store => next => action => { + // if we don't find our symbol don't even bother + if (action.hasOwnProperty && !action.hasOwnProperty(RemoteResource)) + return next(action); + + const callOpts = action[RemoteResource]; + store.dispatch(action); + }; +} + +/** + @private + @name cacheLookup + @desc consults the given state cache mapping to see if we already have a value + note that this will *not* run when using actionable HTTP methods like POST + @param {string} method HTTP verb, used to determine if we should cache + @param {function} cacheMapping + @param {boolean} nocache + @param {function} emitSuccessOnCacheHit + @returns {Promise<boolean>} result + + @TODO allow `cacheMapping` to return a value that will be supplied to the + success event if `emitSuccessOnCacheHit` is true after the whole thing + short circuits +**/ +function cacheLookup(state, { method, cacheMapping, nocache }) { + if (!utils.isCacheableRequest(method) || !cacheMapping || nocache) + return Promise.resolve(false); + + return Promise.resolve(cacheMapping(state)); +}
--- a/lib/middleware.js +++ b/lib/middleware.js @@ -6,4 +6,48 @@ CON DEL const remoteResourceMiddleware = () => {}; ADD /* eslint no-unused-vars:0 */ ADD import fetch from 'isomorphic-fetch'; ADD import RemoteResource from './RemoteResource'; ADD import * as utils from './utils'; CON DEL export default remoteResourceMiddleware; ADD /** ADD @name remoteResourceMiddleware ADD @desc provides a function that can be used to configure the middleware's ADD behavior, returning valid Redux middleware ADD @param {object} conf ADD @param {object} injectedHeaders headers to be injected on every outgoing req ADD @param {object} statusActions HTTP Status codes w/ corresponding types ADD or functions that will be emitted when the status code is received ADD @returns {function} ReduxMiddleware ADD **/ ADD export default function remoteResourceMiddleware(conf) { ADD return store => next => action => { ADD // if we don't find our symbol don't even bother ADD if (action.hasOwnProperty && !action.hasOwnProperty(RemoteResource)) ADD return next(action); ADD ADD const callOpts = action[RemoteResource]; ADD store.dispatch(action); ADD }; ADD } ADD ADD /** ADD @private ADD @name cacheLookup ADD @desc consults the given state cache mapping to see if we already have a value ADD note that this will *not* run when using actionable HTTP methods like POST ADD @param {string} method HTTP verb, used to determine if we should cache ADD @param {function} cacheMapping ADD @param {boolean} nocache ADD @param {function} emitSuccessOnCacheHit ADD @returns {Promise<boolean>} result ADD ADD @TODO allow `cacheMapping` to return a value that will be supplied to the ADD success event if `emitSuccessOnCacheHit` is true after the whole thing ADD short circuits ADD **/ ADD function cacheLookup(state, { method, cacheMapping, nocache }) { ADD if (!utils.isCacheableRequest(method) || !cacheMapping || nocache) ADD return Promise.resolve(false); ADD ADD return Promise.resolve(cacheMapping(state)); ADD }
<<<<<<< SEARCH **/ const remoteResourceMiddleware = () => {}; export default remoteResourceMiddleware; ======= **/ /* eslint no-unused-vars:0 */ import fetch from 'isomorphic-fetch'; import RemoteResource from './RemoteResource'; import * as utils from './utils'; /** @name remoteResourceMiddleware @desc provides a function that can be used to configure the middleware's behavior, returning valid Redux middleware @param {object} conf @param {object} injectedHeaders headers to be injected on every outgoing req @param {object} statusActions HTTP Status codes w/ corresponding types or functions that will be emitted when the status code is received @returns {function} ReduxMiddleware **/ export default function remoteResourceMiddleware(conf) { return store => next => action => { // if we don't find our symbol don't even bother if (action.hasOwnProperty && !action.hasOwnProperty(RemoteResource)) return next(action); const callOpts = action[RemoteResource]; store.dispatch(action); }; } /** @private @name cacheLookup @desc consults the given state cache mapping to see if we already have a value note that this will *not* run when using actionable HTTP methods like POST @param {string} method HTTP verb, used to determine if we should cache @param {function} cacheMapping @param {boolean} nocache @param {function} emitSuccessOnCacheHit @returns {Promise<boolean>} result @TODO allow `cacheMapping` to return a value that will be supplied to the success event if `emitSuccessOnCacheHit` is true after the whole thing short circuits **/ function cacheLookup(state, { method, cacheMapping, nocache }) { if (!utils.isCacheableRequest(method) || !cacheMapping || nocache) return Promise.resolve(false); return Promise.resolve(cacheMapping(state)); } >>>>>>> REPLACE
jcarbaugh/django-wellknown
54b3b69d152611d55ce7db66c2c34dc2b1140cc7
wellknown/models.py
python
bsd-3-clause
Remove code that was causing a problem running syncdb. Code seems to be redundant anyway.
from django.db import models
from django.db.models.signals import post_save
import mimetypes
import wellknown

#
# create default host-meta handler
#
from wellknown.resources import HostMeta

wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml')

#
# resource model
#

class Resource(models.Model):
    path = models.CharField(max_length=128)
    content = models.TextField(blank=True)
    content_type = models.CharField(max_length=128, blank=True)

    class Meta:
        ordering = ('path',)

    def __unicode__(self):
        return self.path

    def save(self, **kwargs):
        self.path = self.path.strip('/')
        if not self.content_type:
            self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain'
        super(Resource, self).save(**kwargs)

#
# update resources when models are saved
#

def save_handler(sender, **kwargs):
    reg = kwargs['instance']
    wellknown.register(
        reg.path,
        content=reg.content,
        content_type=reg.content_type,
        update=True
    )

post_save.connect(save_handler, sender=Resource)

#
# cache resources
#

for res in Resource.objects.all():
    wellknown.register(res.path, content=res.content, content_type=res.content_type)
from django.db import models
from django.db.models.signals import post_save
import mimetypes
import wellknown

#
# create default host-meta handler
#
from wellknown.resources import HostMeta

wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml')

#
# resource model
#

class Resource(models.Model):
    path = models.CharField(max_length=128)
    content = models.TextField(blank=True)
    content_type = models.CharField(max_length=128, blank=True)

    class Meta:
        ordering = ('path',)

    def __unicode__(self):
        return self.path

    def save(self, **kwargs):
        self.path = self.path.strip('/')
        if not self.content_type:
            self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain'
        super(Resource, self).save(**kwargs)

#
# update resources when models are saved
#

def save_handler(sender, **kwargs):
    reg = kwargs['instance']
    wellknown.register(
        reg.path,
        content=reg.content,
        content_type=reg.content_type,
        update=True
    )

post_save.connect(save_handler, sender=Resource)
0
7
1
del_only
--- a/wellknown/models.py
+++ b/wellknown/models.py
@@ -47,8 +47 @@
 post_save.connect(save_handler, sender=Resource)
-
-#
-# cache resources
-#
-
-for res in Resource.objects.all():
-    wellknown.register(res.path, content=res.content, content_type=res.content_type)
--- a/wellknown/models.py
+++ b/wellknown/models.py
@@ ... @@
 post_save.connect(save_handler, sender=Resource)
-
-#
-# cache resources
-#
-
-for res in Resource.objects.all():
-    wellknown.register(res.path, content=res.content, content_type=res.content_type)
--- a/wellknown/models.py
+++ b/wellknown/models.py
@@ -47,8 +47 @@
CON post_save.connect(save_handler, sender=Resource)
DEL 
DEL #
DEL # cache resources
DEL #
DEL 
DEL for res in Resource.objects.all():
DEL     wellknown.register(res.path, content=res.content, content_type=res.content_type)
<<<<<<< SEARCH
post_save.connect(save_handler, sender=Resource)

#
# cache resources
#

for res in Resource.objects.all():
    wellknown.register(res.path, content=res.content, content_type=res.content_type)
=======
post_save.connect(save_handler, sender=Resource)
>>>>>>> REPLACE
bibaev/stream-debugger-plugin
6d444d51ae1fc69b60608f3ab9329ed775229231
src/main/java/com/intellij/debugger/streams/ui/impl/MappingPane.kt
kotlin
apache-2.0
Add workaround to align mapping pane with collection views
package com.intellij.debugger.streams.ui.impl

import com.intellij.debugger.streams.ui.LinkedValuesMapping
import com.intellij.debugger.streams.ui.ValueWithPosition
import com.intellij.ui.JBColor
import com.intellij.ui.components.JBLabel
import java.awt.BorderLayout
import java.awt.Graphics
import javax.swing.JPanel

/**
 * @author Vitaliy.Bibaev
 */
class MappingPane(private val beforeValues: List<ValueWithPosition>,
                  private val mapping: LinkedValuesMapping) : JPanel(BorderLayout()) {
  init {
    add(JBLabel("map"), BorderLayout.NORTH)
    add(MyDrawPane(), BorderLayout.CENTER)
  }

  private inner class MyDrawPane : JPanel() {
    override fun paintComponent(g: Graphics?) {
      if (g == null) {
        return
      }

      val x1 = x
      val x2 = x + width
      for (value in beforeValues) {
        if (!value.isVisible) continue
        val position: Int = value.position
        val linkedValues = mapping.getLinkedValues(value) ?: continue
        for (nextValue in linkedValues.filter { it.isVisible }) {
          g.color = JBColor.BLACK
          g.drawLine(x1, position, x2, nextValue.position)
        }
      }
    }
  }
}
package com.intellij.debugger.streams.ui.impl

import com.intellij.debugger.streams.ui.LinkedValuesMapping
import com.intellij.debugger.streams.ui.ValueWithPosition
import com.intellij.ui.JBColor
import com.intellij.ui.components.JBLabel
import java.awt.BorderLayout
import java.awt.Graphics
import javax.swing.JPanel

/**
 * @author Vitaliy.Bibaev
 */
class MappingPane(private val beforeValues: List<ValueWithPosition>,
                  private val mapping: LinkedValuesMapping) : JPanel(BorderLayout()) {
  init {
    // TODO: fix this workaround
    add(JBLabel(" "), BorderLayout.NORTH)
    add(MyDrawPane(), BorderLayout.CENTER)
  }

  private inner class MyDrawPane : JPanel() {
    override fun paintComponent(g: Graphics?) {
      if (g == null) {
        return
      }

      val x1 = x
      val x2 = x + width
      for (value in beforeValues) {
        if (!value.isVisible) continue
        val position: Int = value.position
        val linkedValues = mapping.getLinkedValues(value) ?: continue
        for (nextValue in linkedValues.filter { it.isVisible }) {
          g.color = JBColor.BLACK
          g.drawLine(x1, position, x2, nextValue.position)
        }
      }
    }
  }
}
2
1
1
mixed
--- a/src/main/java/com/intellij/debugger/streams/ui/impl/MappingPane.kt
+++ b/src/main/java/com/intellij/debugger/streams/ui/impl/MappingPane.kt
@@ -16,3 +16,4 @@
   init {
-    add(JBLabel("map"), BorderLayout.NORTH)
+    // TODO: fix this workaround
+    add(JBLabel(" "), BorderLayout.NORTH)
     add(MyDrawPane(), BorderLayout.CENTER)
--- a/src/main/java/com/intellij/debugger/streams/ui/impl/MappingPane.kt
+++ b/src/main/java/com/intellij/debugger/streams/ui/impl/MappingPane.kt
@@ ... @@
   init {
-    add(JBLabel("map"), BorderLayout.NORTH)
+    // TODO: fix this workaround
+    add(JBLabel(" "), BorderLayout.NORTH)
     add(MyDrawPane(), BorderLayout.CENTER)
--- a/src/main/java/com/intellij/debugger/streams/ui/impl/MappingPane.kt
+++ b/src/main/java/com/intellij/debugger/streams/ui/impl/MappingPane.kt
@@ -16,3 +16,4 @@
CON   init {
DEL     add(JBLabel("map"), BorderLayout.NORTH)
ADD     // TODO: fix this workaround
ADD     add(JBLabel(" "), BorderLayout.NORTH)
CON     add(MyDrawPane(), BorderLayout.CENTER)
<<<<<<< SEARCH
                  private val mapping: LinkedValuesMapping) : JPanel(BorderLayout()) {
  init {
    add(JBLabel("map"), BorderLayout.NORTH)
    add(MyDrawPane(), BorderLayout.CENTER)
  }
=======
                  private val mapping: LinkedValuesMapping) : JPanel(BorderLayout()) {
  init {
    // TODO: fix this workaround
    add(JBLabel(" "), BorderLayout.NORTH)
    add(MyDrawPane(), BorderLayout.CENTER)
  }
>>>>>>> REPLACE
unikent/astro
00dd5809a44a1bca93abdc89ada084660a6477b6
resources/assets/js/store/index.js
javascript
mit
Update store to use new Vuex plugins. Overall this feels like a much cleaner approach.
import Vue from 'vue'; import Vuex from 'vuex'; import undoRedo from '../plugins/undo-redo'; import page from './modules/page'; import definition from './modules/definition'; /* global window */ Vue.use(Vuex); const store = ( window.self === window.top ? { state: { over: { x: 0, y: 0 }, preview: { visible: false, url: '' }, wrapperStyles: {}, showIframeOverlay: false }, getters: {}, mutations: { updateOver(state, position) { state.over = position; }, changePreview(state, value) { state.preview = value; }, updateWrapperStyle(state, { prop, value }) { state.wrapperStyles = { ...state.wrapperStyles, [prop]: value }; }, showIframeOverlay(state, yes) { state.showIframeOverlay = yes; } }, actions: {}, modules: { page, definition }, plugins: [undoRedo] } : window.top.store ); window.store = store; export default new Vuex.Store(store);
import Vue from 'vue'; import Vuex from 'vuex'; import undoRedo from '../plugins/undo-redo'; import shareMutations from '../plugins/share-mutations'; import shareDevTools from '../plugins/share-devtools'; import page from './modules/page'; import definition from './modules/definition'; import Config from 'classes/Config'; /* global process */ Vue.use(Vuex); let store = new Vuex.Store({ state: { over: { x: 0, y: 0 }, preview: { visible: false, url: '' }, wrapperStyles: {}, showIframeOverlay: false }, getters: {}, mutations: { updateOver(state, position) { state.over = position; }, changePreview(state, value) { state.preview = value; }, updateWrapperStyle(state, { prop, value }) { state.wrapperStyles = { ...state.wrapperStyles, [prop]: value }; }, showIframeOverlay(state, yes) { state.showIframeOverlay = yes; } }, actions: {}, modules: { page, definition }, plugins: [ shareMutations, undoRedo, ...(Config.get('debug', false) ? [shareDevTools] : []) ], strict: process.env.NODE_ENV !== 'production' }); export default store;
46
46
2
mixed
--- a/resources/assets/js/store/index.js +++ b/resources/assets/js/store/index.js @@ -3,7 +3,9 @@ import undoRedo from '../plugins/undo-redo'; - +import shareMutations from '../plugins/share-mutations'; +import shareDevTools from '../plugins/share-devtools'; import page from './modules/page'; import definition from './modules/definition'; +import Config from 'classes/Config'; -/* global window */ +/* global process */ @@ -11,57 +13,55 @@ -const store = ( - window.self === window.top ? { +let store = new Vuex.Store({ - state: { - over: { - x: 0, - y: 0 - }, - preview: { - visible: false, - url: '' - }, - wrapperStyles: {}, - showIframeOverlay: false + state: { + over: { + x: 0, + y: 0 + }, + preview: { + visible: false, + url: '' + }, + wrapperStyles: {}, + showIframeOverlay: false + }, + + getters: {}, + + mutations: { + + updateOver(state, position) { + state.over = position; }, - getters: {}, - - mutations: { - - updateOver(state, position) { - state.over = position; - }, - - changePreview(state, value) { - state.preview = value; - }, - - updateWrapperStyle(state, { prop, value }) { - state.wrapperStyles = { ...state.wrapperStyles, [prop]: value }; - }, - - showIframeOverlay(state, yes) { - state.showIframeOverlay = yes; - } + changePreview(state, value) { + state.preview = value; }, - actions: {}, - - modules: { - page, - definition + updateWrapperStyle(state, { prop, value }) { + state.wrapperStyles = { ...state.wrapperStyles, [prop]: value }; }, - plugins: [undoRedo] + showIframeOverlay(state, yes) { + state.showIframeOverlay = yes; + } + }, - } + actions: {}, - : + modules: { + page, + definition + }, - window.top.store -); + plugins: [ + shareMutations, + undoRedo, + ...(Config.get('debug', false) ? [shareDevTools] : []) + ], -window.store = store; + strict: process.env.NODE_ENV !== 'production' -export default new Vuex.Store(store); +}); + +export default store;
--- a/resources/assets/js/store/index.js +++ b/resources/assets/js/store/index.js @@ ... @@ import undoRedo from '../plugins/undo-redo'; - +import shareMutations from '../plugins/share-mutations'; +import shareDevTools from '../plugins/share-devtools'; import page from './modules/page'; import definition from './modules/definition'; +import Config from 'classes/Config'; -/* global window */ +/* global process */ @@ ... @@ -const store = ( - window.self === window.top ? { +let store = new Vuex.Store({ - state: { - over: { - x: 0, - y: 0 - }, - preview: { - visible: false, - url: '' - }, - wrapperStyles: {}, - showIframeOverlay: false + state: { + over: { + x: 0, + y: 0 + }, + preview: { + visible: false, + url: '' + }, + wrapperStyles: {}, + showIframeOverlay: false + }, + + getters: {}, + + mutations: { + + updateOver(state, position) { + state.over = position; }, - getters: {}, - - mutations: { - - updateOver(state, position) { - state.over = position; - }, - - changePreview(state, value) { - state.preview = value; - }, - - updateWrapperStyle(state, { prop, value }) { - state.wrapperStyles = { ...state.wrapperStyles, [prop]: value }; - }, - - showIframeOverlay(state, yes) { - state.showIframeOverlay = yes; - } + changePreview(state, value) { + state.preview = value; }, - actions: {}, - - modules: { - page, - definition + updateWrapperStyle(state, { prop, value }) { + state.wrapperStyles = { ...state.wrapperStyles, [prop]: value }; }, - plugins: [undoRedo] + showIframeOverlay(state, yes) { + state.showIframeOverlay = yes; + } + }, - } + actions: {}, - : + modules: { + page, + definition + }, - window.top.store -); + plugins: [ + shareMutations, + undoRedo, + ...(Config.get('debug', false) ? [shareDevTools] : []) + ], -window.store = store; + strict: process.env.NODE_ENV !== 'production' -export default new Vuex.Store(store); +}); + +export default store;
--- a/resources/assets/js/store/index.js +++ b/resources/assets/js/store/index.js @@ -3,7 +3,9 @@ CON import undoRedo from '../plugins/undo-redo'; DEL ADD import shareMutations from '../plugins/share-mutations'; ADD import shareDevTools from '../plugins/share-devtools'; CON import page from './modules/page'; CON import definition from './modules/definition'; ADD import Config from 'classes/Config'; CON DEL /* global window */ ADD /* global process */ CON @@ -11,57 +13,55 @@ CON DEL const store = ( DEL window.self === window.top ? { ADD let store = new Vuex.Store({ CON DEL state: { DEL over: { DEL x: 0, DEL y: 0 DEL }, DEL preview: { DEL visible: false, DEL url: '' DEL }, DEL wrapperStyles: {}, DEL showIframeOverlay: false ADD state: { ADD over: { ADD x: 0, ADD y: 0 ADD }, ADD preview: { ADD visible: false, ADD url: '' ADD }, ADD wrapperStyles: {}, ADD showIframeOverlay: false ADD }, ADD ADD getters: {}, ADD ADD mutations: { ADD ADD updateOver(state, position) { ADD state.over = position; CON }, CON DEL getters: {}, DEL DEL mutations: { DEL DEL updateOver(state, position) { DEL state.over = position; DEL }, DEL DEL changePreview(state, value) { DEL state.preview = value; DEL }, DEL DEL updateWrapperStyle(state, { prop, value }) { DEL state.wrapperStyles = { ...state.wrapperStyles, [prop]: value }; DEL }, DEL DEL showIframeOverlay(state, yes) { DEL state.showIframeOverlay = yes; DEL } ADD changePreview(state, value) { ADD state.preview = value; CON }, CON DEL actions: {}, DEL DEL modules: { DEL page, DEL definition ADD updateWrapperStyle(state, { prop, value }) { ADD state.wrapperStyles = { ...state.wrapperStyles, [prop]: value }; CON }, CON DEL plugins: [undoRedo] ADD showIframeOverlay(state, yes) { ADD state.showIframeOverlay = yes; ADD } ADD }, CON DEL } ADD actions: {}, CON DEL : ADD modules: { ADD page, ADD definition ADD }, CON DEL window.top.store DEL ); ADD plugins: [ ADD shareMutations, ADD undoRedo, ADD ...(Config.get('debug', false) ? [shareDevTools] : []) ADD ], CON DEL window.store = store; ADD strict: process.env.NODE_ENV !== 'production' CON DEL export default new Vuex.Store(store); ADD }); ADD ADD export default store;
<<<<<<< SEARCH import Vuex from 'vuex'; import undoRedo from '../plugins/undo-redo'; import page from './modules/page'; import definition from './modules/definition'; /* global window */ Vue.use(Vuex); const store = ( window.self === window.top ? { state: { over: { x: 0, y: 0 }, preview: { visible: false, url: '' }, wrapperStyles: {}, showIframeOverlay: false }, getters: {}, mutations: { updateOver(state, position) { state.over = position; }, changePreview(state, value) { state.preview = value; }, updateWrapperStyle(state, { prop, value }) { state.wrapperStyles = { ...state.wrapperStyles, [prop]: value }; }, showIframeOverlay(state, yes) { state.showIframeOverlay = yes; } }, actions: {}, modules: { page, definition }, plugins: [undoRedo] } : window.top.store ); window.store = store; export default new Vuex.Store(store); ======= import Vuex from 'vuex'; import undoRedo from '../plugins/undo-redo'; import shareMutations from '../plugins/share-mutations'; import shareDevTools from '../plugins/share-devtools'; import page from './modules/page'; import definition from './modules/definition'; import Config from 'classes/Config'; /* global process */ Vue.use(Vuex); let store = new Vuex.Store({ state: { over: { x: 0, y: 0 }, preview: { visible: false, url: '' }, wrapperStyles: {}, showIframeOverlay: false }, getters: {}, mutations: { updateOver(state, position) { state.over = position; }, changePreview(state, value) { state.preview = value; }, updateWrapperStyle(state, { prop, value }) { state.wrapperStyles = { ...state.wrapperStyles, [prop]: value }; }, showIframeOverlay(state, yes) { state.showIframeOverlay = yes; } }, actions: {}, modules: { page, definition }, plugins: [ shareMutations, undoRedo, ...(Config.get('debug', false) ? [shareDevTools] : []) ], strict: process.env.NODE_ENV !== 'production' }); export default store; >>>>>>> REPLACE
danylaporte/rust-asyncplify
048ab12ec24b913b01574196798236880b9b0ffe
src/subscription.rs
rust
mit
Add Subscription funct operator and remove close since it was not possible to be used correctly anyway
use consumer::*; /// Represents a subscription to a `Stream` pub struct Subscription { closed: bool, } impl Subscription { /// Closes the `Subscription` pub fn close(mut self) { self.closed = true; } pub fn new() -> Self { Subscription { closed: false } } } impl<T> Consumer<T> for Subscription { fn emit(&mut self, _: T) -> bool { !self.closed } }
use consumer::*; /// Represents a subscription to a `Stream` pub struct Subscription; /// Represents a subscription to a `Stream` based on an action pub struct SubscriptionAction<F> { f: F, } /// Represents a subscription to a `Stream` based on a func pub struct SubscriptionFunc<F> { predicate: F, } impl<F> SubscriptionAction<F> { /// Creates a new `SubscriptionAction` pub fn new(f: F) -> Self { SubscriptionAction { f: f } } } impl<F> SubscriptionFunc<F> { /// Creates a new `SubscriptionFunc` pub fn new(f: F) -> Self { SubscriptionFunc { predicate: f } } } impl<T> Consumer<T> for Subscription { fn emit(&mut self, _: T) -> bool { true } } impl<F, T> Consumer<T> for SubscriptionAction<F> where F: FnMut(T) { fn emit(&mut self, item: T) -> bool { (self.f)(item); true } } impl<F, T> Consumer<T> for SubscriptionFunc<F> where F: FnMut(T) -> bool { fn emit(&mut self, item: T) -> bool { (self.predicate)(item) } }
37
9
2
mixed
--- a/src/subscription.rs +++ b/src/subscription.rs @@ -3,14 +3,25 @@ /// Represents a subscription to a `Stream` -pub struct Subscription { - closed: bool, +pub struct Subscription; + +/// Represents a subscription to a `Stream` based on an action +pub struct SubscriptionAction<F> { + f: F, } -impl Subscription { - /// Closes the `Subscription` - pub fn close(mut self) { - self.closed = true; +/// Represents a subscription to a `Stream` based on a func +pub struct SubscriptionFunc<F> { + predicate: F, +} + +impl<F> SubscriptionAction<F> { + /// Creates a new `SubscriptionAction` + pub fn new(f: F) -> Self { + SubscriptionAction { f: f } } +} - pub fn new() -> Self { - Subscription { closed: false } +impl<F> SubscriptionFunc<F> { + /// Creates a new `SubscriptionFunc` + pub fn new(f: F) -> Self { + SubscriptionFunc { predicate: f } } @@ -20,4 +31,21 @@ fn emit(&mut self, _: T) -> bool { - !self.closed + true } } + +impl<F, T> Consumer<T> for SubscriptionAction<F> + where F: FnMut(T) +{ + fn emit(&mut self, item: T) -> bool { + (self.f)(item); + true + } +} + +impl<F, T> Consumer<T> for SubscriptionFunc<F> + where F: FnMut(T) -> bool +{ + fn emit(&mut self, item: T) -> bool { + (self.predicate)(item) + } +}
--- a/src/subscription.rs +++ b/src/subscription.rs @@ ... @@ /// Represents a subscription to a `Stream` -pub struct Subscription { - closed: bool, +pub struct Subscription; + +/// Represents a subscription to a `Stream` based on an action +pub struct SubscriptionAction<F> { + f: F, } -impl Subscription { - /// Closes the `Subscription` - pub fn close(mut self) { - self.closed = true; +/// Represents a subscription to a `Stream` based on a func +pub struct SubscriptionFunc<F> { + predicate: F, +} + +impl<F> SubscriptionAction<F> { + /// Creates a new `SubscriptionAction` + pub fn new(f: F) -> Self { + SubscriptionAction { f: f } } +} - pub fn new() -> Self { - Subscription { closed: false } +impl<F> SubscriptionFunc<F> { + /// Creates a new `SubscriptionFunc` + pub fn new(f: F) -> Self { + SubscriptionFunc { predicate: f } } @@ ... @@ fn emit(&mut self, _: T) -> bool { - !self.closed + true } } + +impl<F, T> Consumer<T> for SubscriptionAction<F> + where F: FnMut(T) +{ + fn emit(&mut self, item: T) -> bool { + (self.f)(item); + true + } +} + +impl<F, T> Consumer<T> for SubscriptionFunc<F> + where F: FnMut(T) -> bool +{ + fn emit(&mut self, item: T) -> bool { + (self.predicate)(item) + } +}
--- a/src/subscription.rs +++ b/src/subscription.rs @@ -3,14 +3,25 @@ CON /// Represents a subscription to a `Stream` DEL pub struct Subscription { DEL closed: bool, ADD pub struct Subscription; ADD ADD /// Represents a subscription to a `Stream` based on an action ADD pub struct SubscriptionAction<F> { ADD f: F, CON } CON DEL impl Subscription { DEL /// Closes the `Subscription` DEL pub fn close(mut self) { DEL self.closed = true; ADD /// Represents a subscription to a `Stream` based on a func ADD pub struct SubscriptionFunc<F> { ADD predicate: F, ADD } ADD ADD impl<F> SubscriptionAction<F> { ADD /// Creates a new `SubscriptionAction` ADD pub fn new(f: F) -> Self { ADD SubscriptionAction { f: f } CON } ADD } CON DEL pub fn new() -> Self { DEL Subscription { closed: false } ADD impl<F> SubscriptionFunc<F> { ADD /// Creates a new `SubscriptionFunc` ADD pub fn new(f: F) -> Self { ADD SubscriptionFunc { predicate: f } CON } @@ -20,4 +31,21 @@ CON fn emit(&mut self, _: T) -> bool { DEL !self.closed ADD true CON } CON } ADD ADD impl<F, T> Consumer<T> for SubscriptionAction<F> ADD where F: FnMut(T) ADD { ADD fn emit(&mut self, item: T) -> bool { ADD (self.f)(item); ADD true ADD } ADD } ADD ADD impl<F, T> Consumer<T> for SubscriptionFunc<F> ADD where F: FnMut(T) -> bool ADD { ADD fn emit(&mut self, item: T) -> bool { ADD (self.predicate)(item) ADD } ADD }
<<<<<<< SEARCH /// Represents a subscription to a `Stream` pub struct Subscription { closed: bool, } impl Subscription { /// Closes the `Subscription` pub fn close(mut self) { self.closed = true; } pub fn new() -> Self { Subscription { closed: false } } } impl<T> Consumer<T> for Subscription { fn emit(&mut self, _: T) -> bool { !self.closed } } ======= /// Represents a subscription to a `Stream` pub struct Subscription; /// Represents a subscription to a `Stream` based on an action pub struct SubscriptionAction<F> { f: F, } /// Represents a subscription to a `Stream` based on a func pub struct SubscriptionFunc<F> { predicate: F, } impl<F> SubscriptionAction<F> { /// Creates a new `SubscriptionAction` pub fn new(f: F) -> Self { SubscriptionAction { f: f } } } impl<F> SubscriptionFunc<F> { /// Creates a new `SubscriptionFunc` pub fn new(f: F) -> Self { SubscriptionFunc { predicate: f } } } impl<T> Consumer<T> for Subscription { fn emit(&mut self, _: T) -> bool { true } } impl<F, T> Consumer<T> for SubscriptionAction<F> where F: FnMut(T) { fn emit(&mut self, item: T) -> bool { (self.f)(item); true } } impl<F, T> Consumer<T> for SubscriptionFunc<F> where F: FnMut(T) -> bool { fn emit(&mut self, item: T) -> bool { (self.predicate)(item) } } >>>>>>> REPLACE
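A minimal, self-contained sketch of the pattern added in the rust-asyncplify commit above: a subscription wraps a closure, and the closure's boolean result decides whether emission continues. The Consumer trait below is a stand-in assumed to match the shape of the crate's consumer module; only SubscriptionFunc mirrors the diff directly.

```rust
// Stand-in for the crate's Consumer trait (assumption: the real trait in
// the `consumer` module has this shape).
trait Consumer<T> {
    /// Returns `true` to keep receiving items, `false` to stop.
    fn emit(&mut self, item: T) -> bool;
}

/// Predicate-driven subscription, mirroring the `SubscriptionFunc` added
/// in the commit above.
struct SubscriptionFunc<F> {
    predicate: F,
}

impl<F> SubscriptionFunc<F> {
    fn new(f: F) -> Self {
        SubscriptionFunc { predicate: f }
    }
}

impl<F, T> Consumer<T> for SubscriptionFunc<F>
    where F: FnMut(T) -> bool
{
    fn emit(&mut self, item: T) -> bool {
        (self.predicate)(item)
    }
}

fn main() {
    // Keep consuming items while they stay below 3; emission stops as soon
    // as `emit` returns `false`.
    let mut sub = SubscriptionFunc::new(|x: i32| x < 3);
    for i in 0.. {
        if !sub.emit(i) {
            println!("stopped at {}", i);
            break;
        }
    }
}
```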
danielzy95/automaton-visualizer
f854d864bab0a844c640b1423a5129bd2fa4412d
app/errors.js
javascript
mit
Add error for invalid pda transitions
export class UnknownCharError extends Error { constructor(unknownChar) { super(`Character '${unknownChar}' is not a part of the alphabet.`) } } export class UnknownStateError extends Error { constructor(stateName) { super(`State '${stateName}' doesn't exist in the automata.`) } } export class DeterminismError extends Error { constructor(state, a) { super(`State '${state}' already has a transition with character '${a}'.`) } } export class NoInitialStateError extends Error { constructor() { super('No initial state has been set.') } } export class DuplicateStateError extends Error { constructor(name) { super(`State '${name}' already exists.`) } } export class DuplicateTransitionError extends Error { constructor({ from, a, to }) { super(`A transition from state '${from}' with char '${a}' to state '${to}' already exists.`) } } export class NoFinalStatesError extends Error { constructor() { super('No final states have been defined.') } }
export class UnknownCharError extends Error { constructor(unknownChar) { super(`Character '${unknownChar}' is not a part of the alphabet.`) } } export class UnknownStateError extends Error { constructor(stateName) { super(`State '${stateName}' doesn't exist in the automata.`) } } export class DeterminismError extends Error { constructor(state, a) { super(`State '${state}' already has a transition with character '${a}'.`) } } export class NoInitialStateError extends Error { constructor() { super('No initial state has been set.') } } export class DuplicateStateError extends Error { constructor(name) { super(`State '${name}' already exists.`) } } export class DuplicateTransitionError extends Error { constructor({ from, a, to }) { super(`A transition from state '${from}' with char '${a}' to state '${to}' already exists.`) } } export class NoFinalStatesError extends Error { constructor() { super('No final states have been defined.') } } export class InvalidPDATransition extends Error { constructor(str) { super(`Invalid transition symbol '${str}'.`) } }
6
0
1
add_only
--- a/app/errors.js +++ b/app/errors.js @@ -41 +41,7 @@ } + +export class InvalidPDATransition extends Error { + constructor(str) { + super(`Invalid transition symbol '${str}'.`) + } +}
--- a/app/errors.js +++ b/app/errors.js @@ ... @@ } + +export class InvalidPDATransition extends Error { + constructor(str) { + super(`Invalid transition symbol '${str}'.`) + } +}
--- a/app/errors.js +++ b/app/errors.js @@ -41 +41,7 @@ CON } ADD ADD export class InvalidPDATransition extends Error { ADD constructor(str) { ADD super(`Invalid transition symbol '${str}'.`) ADD } ADD }
<<<<<<< SEARCH super('No final states have been defined.') } } ======= super('No final states have been defined.') } } export class InvalidPDATransition extends Error { constructor(str) { super(`Invalid transition symbol '${str}'.`) } } >>>>>>> REPLACE
aptos-labs/aptos-core
5b00d5498dfc64df66860a77e2a884c952a0668a
network/src/protocols/direct_send/mod.rs
rust
apache-2.0
[network] Add documentation for Direct Send Message
// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::{protocols::network::SerializedRequest, ProtocolId}; use bytes::Bytes; use serde::Serialize; use std::fmt::Debug; // TODO(philiphayes): just use wire::DirectSendMsg directly #[derive(Clone, Eq, PartialEq, Serialize)] pub struct Message { /// Message type. pub protocol_id: ProtocolId, /// Serialized message data. #[serde(skip)] pub mdata: Bytes, } impl Debug for Message { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mdata_str = if self.mdata.len() <= 10 { format!("{:?}", self.mdata) } else { format!("{:?}...", self.mdata.slice(..10)) }; write!( f, "Message {{ protocol: {:?}, mdata: {} }}", self.protocol_id, mdata_str ) } } impl SerializedRequest for Message { fn protocol_id(&self) -> ProtocolId { self.protocol_id } fn data(&self) -> &Bytes { &self.mdata } }
// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::{protocols::network::SerializedRequest, ProtocolId}; use bytes::Bytes; use serde::Serialize; use std::fmt::Debug; #[derive(Clone, Eq, PartialEq, Serialize)] pub struct Message { /// The [`ProtocolId`] for which of our upstream application modules should /// handle (i.e., deserialize and then respond to) this inbound rpc request. /// /// For example, if `protocol_id == ProtocolId::ConsensusRpc`, then this /// inbound rpc request will be dispatched to consensus for handling. pub protocol_id: ProtocolId, /// The serialized request data received from the sender. At this layer in /// the stack, the request data is just an opaque blob and will only be fully /// deserialized later in the handling application module. #[serde(skip)] pub mdata: Bytes, } impl Debug for Message { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mdata_str = if self.mdata.len() <= 10 { format!("{:?}", self.mdata) } else { format!("{:?}...", self.mdata.slice(..10)) }; write!( f, "Message {{ protocol: {:?}, mdata: {} }}", self.protocol_id, mdata_str ) } } impl SerializedRequest for Message { fn protocol_id(&self) -> ProtocolId { self.protocol_id } fn data(&self) -> &Bytes { &self.mdata } }
8
4
1
mixed
--- a/network/src/protocols/direct_send/mod.rs +++ b/network/src/protocols/direct_send/mod.rs @@ -8,9 +8,13 @@ -// TODO(philiphayes): just use wire::DirectSendMsg directly - #[derive(Clone, Eq, PartialEq, Serialize)] pub struct Message { - /// Message type. + /// The [`ProtocolId`] for which of our upstream application modules should + /// handle (i.e., deserialize and then respond to) this inbound rpc request. + /// + /// For example, if `protocol_id == ProtocolId::ConsensusRpc`, then this + /// inbound rpc request will be dispatched to consensus for handling. pub protocol_id: ProtocolId, - /// Serialized message data. + /// The serialized request data received from the sender. At this layer in + /// the stack, the request data is just an opaque blob and will only be fully + /// deserialized later in the handling application module. #[serde(skip)]
--- a/network/src/protocols/direct_send/mod.rs +++ b/network/src/protocols/direct_send/mod.rs @@ ... @@ -// TODO(philiphayes): just use wire::DirectSendMsg directly - #[derive(Clone, Eq, PartialEq, Serialize)] pub struct Message { - /// Message type. + /// The [`ProtocolId`] for which of our upstream application modules should + /// handle (i.e., deserialize and then respond to) this inbound rpc request. + /// + /// For example, if `protocol_id == ProtocolId::ConsensusRpc`, then this + /// inbound rpc request will be dispatched to consensus for handling. pub protocol_id: ProtocolId, - /// Serialized message data. + /// The serialized request data received from the sender. At this layer in + /// the stack, the request data is just an opaque blob and will only be fully + /// deserialized later in the handling application module. #[serde(skip)]
--- a/network/src/protocols/direct_send/mod.rs +++ b/network/src/protocols/direct_send/mod.rs @@ -8,9 +8,13 @@ CON DEL // TODO(philiphayes): just use wire::DirectSendMsg directly DEL CON #[derive(Clone, Eq, PartialEq, Serialize)] CON pub struct Message { DEL /// Message type. ADD /// The [`ProtocolId`] for which of our upstream application modules should ADD /// handle (i.e., deserialize and then respond to) this inbound rpc request. ADD /// ADD /// For example, if `protocol_id == ProtocolId::ConsensusRpc`, then this ADD /// inbound rpc request will be dispatched to consensus for handling. CON pub protocol_id: ProtocolId, DEL /// Serialized message data. ADD /// The serialized request data received from the sender. At this layer in ADD /// the stack, the request data is just an opaque blob and will only be fully ADD /// deserialized later in the handling application module. CON #[serde(skip)]
<<<<<<< SEARCH use std::fmt::Debug; // TODO(philiphayes): just use wire::DirectSendMsg directly #[derive(Clone, Eq, PartialEq, Serialize)] pub struct Message { /// Message type. pub protocol_id: ProtocolId, /// Serialized message data. #[serde(skip)] pub mdata: Bytes, ======= use std::fmt::Debug; #[derive(Clone, Eq, PartialEq, Serialize)] pub struct Message { /// The [`ProtocolId`] for which of our upstream application modules should /// handle (i.e., deserialize and then respond to) this inbound rpc request. /// /// For example, if `protocol_id == ProtocolId::ConsensusRpc`, then this /// inbound rpc request will be dispatched to consensus for handling. pub protocol_id: ProtocolId, /// The serialized request data received from the sender. At this layer in /// the stack, the request data is just an opaque blob and will only be fully /// deserialized later in the handling application module. #[serde(skip)] pub mdata: Bytes, >>>>>>> REPLACE
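Beyond the new field documentation, the Message type shown above keeps a hand-written Debug impl that previews at most ten bytes of the payload. The sketch below reproduces that truncation idea in isolation; it substitutes Vec<u8> for bytes::Bytes and a plain u8 for ProtocolId purely so the example has no dependencies, which is an assumption rather than the real types.

```rust
use std::fmt;

/// Simplified stand-in for the network `Message`: the real type wraps
/// `bytes::Bytes` and a `ProtocolId`; plain std types are used here.
struct Message {
    protocol_id: u8,
    mdata: Vec<u8>,
}

impl fmt::Debug for Message {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Preview at most the first ten bytes, as the original impl does.
        let mdata_str = if self.mdata.len() <= 10 {
            format!("{:?}", self.mdata)
        } else {
            format!("{:?}...", &self.mdata[..10])
        };
        write!(
            f,
            "Message {{ protocol: {:?}, mdata: {} }}",
            self.protocol_id, mdata_str
        )
    }
}

fn main() {
    let msg = Message { protocol_id: 7, mdata: (0u8..32).collect() };
    // Prints a truncated preview instead of the whole payload.
    println!("{:?}", msg);
}
```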
imazen/imageflow
e95b7c2cfbdde9a8e86878687ea3e0adb823eb58
imageflow_helpers/src/lib.rs
rust
agpl-3.0
Make imageflow_helpers run tests under system alloc
#[macro_use] extern crate lazy_static; extern crate reqwest; extern crate hyper_native_tls; extern crate regex; extern crate hyper; extern crate blake2_rfc; extern crate twox_hash; extern crate chrono; extern crate zip; extern crate serde; extern crate serde_json; extern crate libc; extern crate backtrace; #[cfg(not(any(target_os = "windows", target_os = "macos")))] extern crate openssl; #[cfg(not(any(target_os = "windows", target_os = "macos")))] extern crate hyper_openssl; pub mod identifier_styles; pub mod preludes; pub mod filesystem; pub mod fetching; pub mod caching; pub mod hashing; pub mod process_testing; pub mod process_capture; pub mod colors; pub mod debug; pub mod timeywimey{ pub fn time_bucket(seconds_per_bucket: u64, bucket_count: u64) -> u64{ ::std::time::SystemTime::now().duration_since(::std::time::UNIX_EPOCH).unwrap().as_secs() / seconds_per_bucket % bucket_count } pub use chrono::UTC; } #[cfg(test)] mod tests { #[test] fn it_works() { } }
#![feature(alloc_system)] extern crate alloc_system; #[macro_use] extern crate lazy_static; extern crate reqwest; extern crate hyper_native_tls; extern crate regex; extern crate hyper; extern crate blake2_rfc; extern crate twox_hash; extern crate chrono; extern crate zip; extern crate serde; extern crate serde_json; extern crate libc; extern crate backtrace; #[cfg(not(any(target_os = "windows", target_os = "macos")))] extern crate openssl; #[cfg(not(any(target_os = "windows", target_os = "macos")))] extern crate hyper_openssl; pub mod identifier_styles; pub mod preludes; pub mod filesystem; pub mod fetching; pub mod caching; pub mod hashing; pub mod process_testing; pub mod process_capture; pub mod colors; pub mod debug; pub mod timeywimey{ pub fn time_bucket(seconds_per_bucket: u64, bucket_count: u64) -> u64{ ::std::time::SystemTime::now().duration_since(::std::time::UNIX_EPOCH).unwrap().as_secs() / seconds_per_bucket % bucket_count } pub use chrono::UTC; } #[cfg(test)] mod tests { #[test] fn it_works() { } }
5
0
1
add_only
--- a/imageflow_helpers/src/lib.rs +++ b/imageflow_helpers/src/lib.rs @@ -1 +1,6 @@ +#![feature(alloc_system)] + +extern crate alloc_system; + + #[macro_use]
--- a/imageflow_helpers/src/lib.rs +++ b/imageflow_helpers/src/lib.rs @@ ... @@ +#![feature(alloc_system)] + +extern crate alloc_system; + + #[macro_use]
--- a/imageflow_helpers/src/lib.rs +++ b/imageflow_helpers/src/lib.rs @@ -1 +1,6 @@ ADD #![feature(alloc_system)] ADD ADD extern crate alloc_system; ADD ADD CON #[macro_use]
<<<<<<< SEARCH #[macro_use] extern crate lazy_static; ======= #![feature(alloc_system)] extern crate alloc_system; #[macro_use] extern crate lazy_static; >>>>>>> REPLACE
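The imageflow change above switches the crate to the system allocator through the nightly-only alloc_system feature. On stable Rust the comparable knob is the #[global_allocator] attribute; the sketch below illustrates that general mechanism and is not imageflow's own code.

```rust
// Stable-Rust way to pin a binary (and its tests) to the C library's
// allocator explicitly. General illustration only, not part of imageflow.
use std::alloc::System;

#[global_allocator]
static GLOBAL: System = System;

fn main() {
    // Heap allocations below go through the system allocator.
    let v: Vec<u32> = (0..4).map(|x| x * x).collect();
    println!("{:?}", v);
}
```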
jaraco/jaraco.classes
3cacced39d9cb8bd5d6a2b3db8aa4b5aa1b37f58
jaraco/util/meta.py
python
mit
Allow attribute to be customized in TagRegistered
""" meta.py Some useful metaclasses. """ from __future__ import unicode_literals class LeafClassesMeta(type): """ A metaclass for classes that keeps track of all of them that aren't base classes. """ _leaf_classes = set() def __init__(cls, name, bases, attrs): if not hasattr(cls, '_leaf_classes'): cls._leaf_classes = set() leaf_classes = getattr(cls, '_leaf_classes') leaf_classes.add(cls) # remove any base classes leaf_classes -= set(bases) class TagRegistered(type): """ As classes of this metaclass are created, they keep a registry in the base class of all classes by a class attribute, 'tag'. """ def __init__(cls, name, bases, namespace): super(TagRegistered, cls).__init__(name, bases, namespace) if not hasattr(cls, '_registry'): cls._registry = {} attr = getattr(cls, 'tag', None) if attr: cls._registry[attr] = cls
""" meta.py Some useful metaclasses. """ from __future__ import unicode_literals class LeafClassesMeta(type): """ A metaclass for classes that keeps track of all of them that aren't base classes. """ _leaf_classes = set() def __init__(cls, name, bases, attrs): if not hasattr(cls, '_leaf_classes'): cls._leaf_classes = set() leaf_classes = getattr(cls, '_leaf_classes') leaf_classes.add(cls) # remove any base classes leaf_classes -= set(bases) class TagRegistered(type): """ As classes of this metaclass are created, they keep a registry in the base class of all classes by a class attribute, indicated by attr_name. """ attr_name = 'tag' def __init__(cls, name, bases, namespace): super(TagRegistered, cls).__init__(name, bases, namespace) if not hasattr(cls, '_registry'): cls._registry = {} meta = cls.__class__ attr = getattr(cls, meta.attr_name, None) if attr: cls._registry[attr] = cls
5
2
2
mixed
--- a/jaraco/util/meta.py +++ b/jaraco/util/meta.py @@ -28,4 +28,6 @@ As classes of this metaclass are created, they keep a registry in the - base class of all classes by a class attribute, 'tag'. + base class of all classes by a class attribute, indicated by attr_name. """ + attr_name = 'tag' + def __init__(cls, name, bases, namespace): @@ -34,3 +36,4 @@ cls._registry = {} - attr = getattr(cls, 'tag', None) + meta = cls.__class__ + attr = getattr(cls, meta.attr_name, None) if attr:
--- a/jaraco/util/meta.py +++ b/jaraco/util/meta.py @@ ... @@ As classes of this metaclass are created, they keep a registry in the - base class of all classes by a class attribute, 'tag'. + base class of all classes by a class attribute, indicated by attr_name. """ + attr_name = 'tag' + def __init__(cls, name, bases, namespace): @@ ... @@ cls._registry = {} - attr = getattr(cls, 'tag', None) + meta = cls.__class__ + attr = getattr(cls, meta.attr_name, None) if attr:
--- a/jaraco/util/meta.py +++ b/jaraco/util/meta.py @@ -28,4 +28,6 @@ CON As classes of this metaclass are created, they keep a registry in the DEL base class of all classes by a class attribute, 'tag'. ADD base class of all classes by a class attribute, indicated by attr_name. CON """ ADD attr_name = 'tag' ADD CON def __init__(cls, name, bases, namespace): @@ -34,3 +36,4 @@ CON cls._registry = {} DEL attr = getattr(cls, 'tag', None) ADD meta = cls.__class__ ADD attr = getattr(cls, meta.attr_name, None) CON if attr:
<<<<<<< SEARCH """ As classes of this metaclass are created, they keep a registry in the base class of all classes by a class attribute, 'tag'. """ def __init__(cls, name, bases, namespace): super(TagRegistered, cls).__init__(name, bases, namespace) if not hasattr(cls, '_registry'): cls._registry = {} attr = getattr(cls, 'tag', None) if attr: cls._registry[attr] = cls ======= """ As classes of this metaclass are created, they keep a registry in the base class of all classes by a class attribute, indicated by attr_name. """ attr_name = 'tag' def __init__(cls, name, bases, namespace): super(TagRegistered, cls).__init__(name, bases, namespace) if not hasattr(cls, '_registry'): cls._registry = {} meta = cls.__class__ attr = getattr(cls, meta.attr_name, None) if attr: cls._registry[attr] = cls >>>>>>> REPLACE
14jesenskyd/PlayerTracker-Java
34d3a2aa3474433e6d4c4c85b7c7c971688209a4
Server/src/main/java/me/jesensky/dan/playertracker/util/DatabaseManager.java
java
artistic-2.0
Rewrite part of how the database manager works (specifically defaults)
package me.jesensky.dan.playertracker.util; import com.mysql.jdbc.MySQLConnection; import com.mysql.jdbc.jdbc2.optional.MysqlDataSource; import javax.sql.DataSource; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; public class DatabaseManager { private Connection connection; private String host; private String username; private String password; private String table; private String db; private int port; public DatabaseManager(String host, int port, String user, String password, String table, String db, String defaults) throws SQLException { this.host = host; this.port = port; this.username = user; this.password = password; this.table = table; this.db = db; this.connect(defaults); } private void connect(String defaults){ MysqlDataSource d = new MysqlDataSource(); d.setUser(this.username); d.setPassword(this.password); d.setServerName(this.host); d.setPort(this.port); d.setDatabaseName(this.db); } private void connect(){ } }
package me.jesensky.dan.playertracker.util; import com.mysql.jdbc.jdbc2.optional.MysqlDataSource; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; public class DatabaseManager { private Connection connection; private String host; private String username; private String password; private String table; private String db; private int port; public static final String PLAYER_DB_DEFAULT = "SELECT * FROM information_schema.tables WHERE table_schema = '?' AND table_name = 'players' LIMIT 1;"; public static final String USER_DB_DEFAULT = "SELECT * FROM information_schema.tables WHERE table_schema = '?' AND table_name = 'users' LIMIT 1;"; public DatabaseManager(String host, int port, String user, String password, String db, String table) throws SQLException { super(); this.host = host; this.port = port; this.username = user; this.password = password; this.table = table; this.db = db; } public void connect(String defaults, String... args) throws SQLException{ this.connect(); PreparedStatement statement = this.connection.prepareStatement(defaults); for(int x = 0; x < args.length; x++) try { statement.setInt(x, Integer.parseInt(args[x])); }catch(NumberFormatException e){ statement.setString(x, args[x]); } statement.execute(); statement.close(); } public void connect() throws SQLException{ MysqlDataSource d = new MysqlDataSource(); d.setUser(this.username); d.setPassword(this.password); d.setServerName(this.host); d.setPort(this.port); d.setDatabaseName(this.db); this.connection = d.getConnection(); } public PreparedStatement prepareStatement(String sql) throws SQLException{ return this.connection.prepareStatement(sql); } }
22
8
4
mixed
--- a/Server/src/main/java/me/jesensky/dan/playertracker/util/DatabaseManager.java +++ b/Server/src/main/java/me/jesensky/dan/playertracker/util/DatabaseManager.java @@ -2,8 +2,6 @@ -import com.mysql.jdbc.MySQLConnection; import com.mysql.jdbc.jdbc2.optional.MysqlDataSource; -import javax.sql.DataSource; import java.sql.Connection; -import java.sql.DriverManager; +import java.sql.PreparedStatement; import java.sql.SQLException; @@ -18,4 +16,7 @@ private int port; + public static final String PLAYER_DB_DEFAULT = "SELECT * FROM information_schema.tables WHERE table_schema = '?' AND table_name = 'players' LIMIT 1;"; + public static final String USER_DB_DEFAULT = "SELECT * FROM information_schema.tables WHERE table_schema = '?' AND table_name = 'users' LIMIT 1;"; - public DatabaseManager(String host, int port, String user, String password, String table, String db, String defaults) throws SQLException { + public DatabaseManager(String host, int port, String user, String password, String db, String table) throws SQLException { + super(); this.host = host; @@ -26,6 +27,18 @@ this.db = db; - this.connect(defaults); } - private void connect(String defaults){ + public void connect(String defaults, String... args) throws SQLException{ + this.connect(); + PreparedStatement statement = this.connection.prepareStatement(defaults); + for(int x = 0; x < args.length; x++) + try { + statement.setInt(x, Integer.parseInt(args[x])); + }catch(NumberFormatException e){ + statement.setString(x, args[x]); + } + statement.execute(); + statement.close(); + } + + public void connect() throws SQLException{ MysqlDataSource d = new MysqlDataSource(); @@ -36,6 +49,7 @@ d.setDatabaseName(this.db); + this.connection = d.getConnection(); } - private void connect(){ - + public PreparedStatement prepareStatement(String sql) throws SQLException{ + return this.connection.prepareStatement(sql); }
--- a/Server/src/main/java/me/jesensky/dan/playertracker/util/DatabaseManager.java +++ b/Server/src/main/java/me/jesensky/dan/playertracker/util/DatabaseManager.java @@ ... @@ -import com.mysql.jdbc.MySQLConnection; import com.mysql.jdbc.jdbc2.optional.MysqlDataSource; -import javax.sql.DataSource; import java.sql.Connection; -import java.sql.DriverManager; +import java.sql.PreparedStatement; import java.sql.SQLException; @@ ... @@ private int port; + public static final String PLAYER_DB_DEFAULT = "SELECT * FROM information_schema.tables WHERE table_schema = '?' AND table_name = 'players' LIMIT 1;"; + public static final String USER_DB_DEFAULT = "SELECT * FROM information_schema.tables WHERE table_schema = '?' AND table_name = 'users' LIMIT 1;"; - public DatabaseManager(String host, int port, String user, String password, String table, String db, String defaults) throws SQLException { + public DatabaseManager(String host, int port, String user, String password, String db, String table) throws SQLException { + super(); this.host = host; @@ ... @@ this.db = db; - this.connect(defaults); } - private void connect(String defaults){ + public void connect(String defaults, String... args) throws SQLException{ + this.connect(); + PreparedStatement statement = this.connection.prepareStatement(defaults); + for(int x = 0; x < args.length; x++) + try { + statement.setInt(x, Integer.parseInt(args[x])); + }catch(NumberFormatException e){ + statement.setString(x, args[x]); + } + statement.execute(); + statement.close(); + } + + public void connect() throws SQLException{ MysqlDataSource d = new MysqlDataSource(); @@ ... @@ d.setDatabaseName(this.db); + this.connection = d.getConnection(); } - private void connect(){ - + public PreparedStatement prepareStatement(String sql) throws SQLException{ + return this.connection.prepareStatement(sql); }
--- a/Server/src/main/java/me/jesensky/dan/playertracker/util/DatabaseManager.java +++ b/Server/src/main/java/me/jesensky/dan/playertracker/util/DatabaseManager.java @@ -2,8 +2,6 @@ CON DEL import com.mysql.jdbc.MySQLConnection; CON import com.mysql.jdbc.jdbc2.optional.MysqlDataSource; CON DEL import javax.sql.DataSource; CON import java.sql.Connection; DEL import java.sql.DriverManager; ADD import java.sql.PreparedStatement; CON import java.sql.SQLException; @@ -18,4 +16,7 @@ CON private int port; ADD public static final String PLAYER_DB_DEFAULT = "SELECT * FROM information_schema.tables WHERE table_schema = '?' AND table_name = 'players' LIMIT 1;"; ADD public static final String USER_DB_DEFAULT = "SELECT * FROM information_schema.tables WHERE table_schema = '?' AND table_name = 'users' LIMIT 1;"; CON DEL public DatabaseManager(String host, int port, String user, String password, String table, String db, String defaults) throws SQLException { ADD public DatabaseManager(String host, int port, String user, String password, String db, String table) throws SQLException { ADD super(); CON this.host = host; @@ -26,6 +27,18 @@ CON this.db = db; DEL this.connect(defaults); CON } CON DEL private void connect(String defaults){ ADD public void connect(String defaults, String... args) throws SQLException{ ADD this.connect(); ADD PreparedStatement statement = this.connection.prepareStatement(defaults); ADD for(int x = 0; x < args.length; x++) ADD try { ADD statement.setInt(x, Integer.parseInt(args[x])); ADD }catch(NumberFormatException e){ ADD statement.setString(x, args[x]); ADD } ADD statement.execute(); ADD statement.close(); ADD } ADD ADD public void connect() throws SQLException{ CON MysqlDataSource d = new MysqlDataSource(); @@ -36,6 +49,7 @@ CON d.setDatabaseName(this.db); ADD this.connection = d.getConnection(); CON } CON DEL private void connect(){ DEL ADD public PreparedStatement prepareStatement(String sql) throws SQLException{ ADD return this.connection.prepareStatement(sql); CON }
<<<<<<< SEARCH package me.jesensky.dan.playertracker.util; import com.mysql.jdbc.MySQLConnection; import com.mysql.jdbc.jdbc2.optional.MysqlDataSource; import javax.sql.DataSource; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; ======= package me.jesensky.dan.playertracker.util; import com.mysql.jdbc.jdbc2.optional.MysqlDataSource; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; >>>>>>> REPLACE <<<<<<< SEARCH private String db; private int port; public DatabaseManager(String host, int port, String user, String password, String table, String db, String defaults) throws SQLException { this.host = host; this.port = port; ======= private String db; private int port; public static final String PLAYER_DB_DEFAULT = "SELECT * FROM information_schema.tables WHERE table_schema = '?' AND table_name = 'players' LIMIT 1;"; public static final String USER_DB_DEFAULT = "SELECT * FROM information_schema.tables WHERE table_schema = '?' AND table_name = 'users' LIMIT 1;"; public DatabaseManager(String host, int port, String user, String password, String db, String table) throws SQLException { super(); this.host = host; this.port = port; >>>>>>> REPLACE <<<<<<< SEARCH this.table = table; this.db = db; this.connect(defaults); } private void connect(String defaults){ MysqlDataSource d = new MysqlDataSource(); d.setUser(this.username); ======= this.table = table; this.db = db; } public void connect(String defaults, String... args) throws SQLException{ this.connect(); PreparedStatement statement = this.connection.prepareStatement(defaults); for(int x = 0; x < args.length; x++) try { statement.setInt(x, Integer.parseInt(args[x])); }catch(NumberFormatException e){ statement.setString(x, args[x]); } statement.execute(); statement.close(); } public void connect() throws SQLException{ MysqlDataSource d = new MysqlDataSource(); d.setUser(this.username); >>>>>>> REPLACE <<<<<<< SEARCH d.setPort(this.port); d.setDatabaseName(this.db); } private void connect(){ } } ======= d.setPort(this.port); d.setDatabaseName(this.db); this.connection = d.getConnection(); } public PreparedStatement prepareStatement(String sql) throws SQLException{ return this.connection.prepareStatement(sql); } } >>>>>>> REPLACE
TheGreenMachine/Zephyr-Java
e236be94ce3bc9372c83792c7abf4de1a0369f46
src/com/edinarobotics/zephyr/Zephyr.java
java
bsd-3-clause
Add joystick control to the new shooter Jaguar.
/*----------------------------------------------------------------------------*/ /* Copyright (c) FIRST 2008. All Rights Reserved. */ /* Open Source Software - may be modified and shared by FRC teams. The code */ /* must be accompanied by the FIRST BSD license file in the root directory of */ /* the project. */ /*----------------------------------------------------------------------------*/ package com.edinarobotics.zephyr; import com.edinarobotics.utils.gamepad.Gamepad; import edu.wpi.first.wpilibj.SimpleRobot; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the SimpleRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ public class Zephyr extends SimpleRobot { /** * This function is called once each time the robot enters autonomous mode. */ public void autonomous() { } /** * This function is called once each time the robot enters operator control. */ public void operatorControl() { Gamepad gamepad1 = new Gamepad(1); Components components = Components.getInstance(); while(this.isOperatorControl()&&this.isEnabled()){ components.leftJaguar.set(gamepad1.getLeftY()); components.rightJaguar.set(gamepad1.getRightY()); } components.leftJaguar.set(0); components.rightJaguar.set(0); } }
/*----------------------------------------------------------------------------*/ /* Copyright (c) FIRST 2008. All Rights Reserved. */ /* Open Source Software - may be modified and shared by FRC teams. The code */ /* must be accompanied by the FIRST BSD license file in the root directory of */ /* the project. */ /*----------------------------------------------------------------------------*/ package com.edinarobotics.zephyr; import com.edinarobotics.utils.gamepad.Gamepad; import edu.wpi.first.wpilibj.SimpleRobot; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the SimpleRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ public class Zephyr extends SimpleRobot { /** * This function is called once each time the robot enters autonomous mode. */ public void autonomous() { } /** * This function is called once each time the robot enters operator control. */ public void operatorControl() { Gamepad gamepad1 = new Gamepad(1); Gamepad gamepad2 = new Gamepad(2); Components components = Components.getInstance(); while(this.isOperatorControl()&&this.isEnabled()){ components.leftJaguar.set(gamepad1.getLeftY()); components.rightJaguar.set(gamepad1.getRightY()); components.shooterJaguar.set(gamepad2.getLeftY()); } components.leftJaguar.set(0); components.rightJaguar.set(0); } }
2
0
2
add_only
--- a/src/com/edinarobotics/zephyr/Zephyr.java +++ b/src/com/edinarobotics/zephyr/Zephyr.java @@ -33,2 +33,3 @@ Gamepad gamepad1 = new Gamepad(1); + Gamepad gamepad2 = new Gamepad(2); Components components = Components.getInstance(); @@ -37,2 +38,3 @@ components.rightJaguar.set(gamepad1.getRightY()); + components.shooterJaguar.set(gamepad2.getLeftY()); }
--- a/src/com/edinarobotics/zephyr/Zephyr.java +++ b/src/com/edinarobotics/zephyr/Zephyr.java @@ ... @@ Gamepad gamepad1 = new Gamepad(1); + Gamepad gamepad2 = new Gamepad(2); Components components = Components.getInstance(); @@ ... @@ components.rightJaguar.set(gamepad1.getRightY()); + components.shooterJaguar.set(gamepad2.getLeftY()); }
--- a/src/com/edinarobotics/zephyr/Zephyr.java +++ b/src/com/edinarobotics/zephyr/Zephyr.java @@ -33,2 +33,3 @@ CON Gamepad gamepad1 = new Gamepad(1); ADD Gamepad gamepad2 = new Gamepad(2); CON Components components = Components.getInstance(); @@ -37,2 +38,3 @@ CON components.rightJaguar.set(gamepad1.getRightY()); ADD components.shooterJaguar.set(gamepad2.getLeftY()); CON }
<<<<<<< SEARCH public void operatorControl() { Gamepad gamepad1 = new Gamepad(1); Components components = Components.getInstance(); while(this.isOperatorControl()&&this.isEnabled()){ components.leftJaguar.set(gamepad1.getLeftY()); components.rightJaguar.set(gamepad1.getRightY()); } components.leftJaguar.set(0); ======= public void operatorControl() { Gamepad gamepad1 = new Gamepad(1); Gamepad gamepad2 = new Gamepad(2); Components components = Components.getInstance(); while(this.isOperatorControl()&&this.isEnabled()){ components.leftJaguar.set(gamepad1.getLeftY()); components.rightJaguar.set(gamepad1.getRightY()); components.shooterJaguar.set(gamepad2.getLeftY()); } components.leftJaguar.set(0); >>>>>>> REPLACE
mjtamlyn/back2back
9058d2ddc9a89913710df0efc8d7c88471592795
back2back/management/commands/import_entries.py
python
bsd-2-clause
Save indexes as well when importing entries.
import csv from optparse import make_option from django.core.management import BaseCommand from back2back.models import Entry class Command(BaseCommand): option_list = BaseCommand.option_list + ( make_option( '-i', '--input', action='store', dest='input_file', default=None, ), make_option( '--reset', action='store_true', dest='reset', default=False, ), ) def handle(self, *args, **options): if options['reset']: Entry.objects.all().delete() input_file = options['input_file'] with open(input_file) as f: reader = csv.reader(f) for row in reader: Entry.objects.create(category=row[0], name=row[1], first_group_number=row[2])
import collections import csv from optparse import make_option from django.core.management import BaseCommand from back2back.models import Entry class Command(BaseCommand): option_list = BaseCommand.option_list + ( make_option( '-i', '--input', action='store', dest='input_file', default=None, ), make_option( '--reset', action='store_true', dest='reset', default=False, ), ) def handle(self, *args, **options): if options['reset']: Entry.objects.all().delete() input_file = options['input_file'] category_group_counts = collections.defaultdict(int) with open(input_file) as f: reader = csv.reader(f) for row in reader: if not row[1].strip(): continue Entry.objects.create( category=row[0], name=row[1], first_group_number=row[2], first_group_index=category_group_counts[(row[0], row[2])], ) category_group_counts[(row[0], row[2])] += 1
11
1
3
mixed
--- a/back2back/management/commands/import_entries.py +++ b/back2back/management/commands/import_entries.py @@ -1 +1,2 @@ +import collections import csv @@ -28,2 +29,3 @@ input_file = options['input_file'] + category_group_counts = collections.defaultdict(int) with open(input_file) as f: @@ -31,2 +33,10 @@ for row in reader: - Entry.objects.create(category=row[0], name=row[1], first_group_number=row[2]) + if not row[1].strip(): + continue + Entry.objects.create( + category=row[0], + name=row[1], + first_group_number=row[2], + first_group_index=category_group_counts[(row[0], row[2])], + ) + category_group_counts[(row[0], row[2])] += 1
--- a/back2back/management/commands/import_entries.py +++ b/back2back/management/commands/import_entries.py @@ ... @@ +import collections import csv @@ ... @@ input_file = options['input_file'] + category_group_counts = collections.defaultdict(int) with open(input_file) as f: @@ ... @@ for row in reader: - Entry.objects.create(category=row[0], name=row[1], first_group_number=row[2]) + if not row[1].strip(): + continue + Entry.objects.create( + category=row[0], + name=row[1], + first_group_number=row[2], + first_group_index=category_group_counts[(row[0], row[2])], + ) + category_group_counts[(row[0], row[2])] += 1
--- a/back2back/management/commands/import_entries.py +++ b/back2back/management/commands/import_entries.py @@ -1 +1,2 @@ ADD import collections CON import csv @@ -28,2 +29,3 @@ CON input_file = options['input_file'] ADD category_group_counts = collections.defaultdict(int) CON with open(input_file) as f: @@ -31,2 +33,10 @@ CON for row in reader: DEL Entry.objects.create(category=row[0], name=row[1], first_group_number=row[2]) ADD if not row[1].strip(): ADD continue ADD Entry.objects.create( ADD category=row[0], ADD name=row[1], ADD first_group_number=row[2], ADD first_group_index=category_group_counts[(row[0], row[2])], ADD ) ADD category_group_counts[(row[0], row[2])] += 1
<<<<<<< SEARCH import csv from optparse import make_option ======= import collections import csv from optparse import make_option >>>>>>> REPLACE <<<<<<< SEARCH Entry.objects.all().delete() input_file = options['input_file'] with open(input_file) as f: reader = csv.reader(f) for row in reader: Entry.objects.create(category=row[0], name=row[1], first_group_number=row[2]) ======= Entry.objects.all().delete() input_file = options['input_file'] category_group_counts = collections.defaultdict(int) with open(input_file) as f: reader = csv.reader(f) for row in reader: if not row[1].strip(): continue Entry.objects.create( category=row[0], name=row[1], first_group_number=row[2], first_group_index=category_group_counts[(row[0], row[2])], ) category_group_counts[(row[0], row[2])] += 1 >>>>>>> REPLACE
bluss/arrayvec
d273861cde87ac91a18e2a169512443b848e7585
src/lib.rs
rust
apache-2.0
DOC: Update MSRV doc to Rust 1.51
//! **arrayvec** provides the types `ArrayVec` and `ArrayString`: //! array-backed vector and string types, which store their contents inline. //! //! The arrayvec package has the following cargo features: //! //! - `std` //! - Optional, enabled by default //! - Use libstd; disable to use `no_std` instead. //! //! - `serde` //! - Optional //! - Enable serialization for ArrayVec and ArrayString using serde 1.x //! //! - `unstable-const-fn` //! - Optional //! - Makes [`ArrayVec::new`] and [`ArrayString::new`] `const fn`s, //! using the nightly `const_fn` feature. //! - Unstable and requires nightly. //! //! ## Rust Version //! //! This version of arrayvec requires Rust 1.36 or later. //! #![doc(html_root_url="https://docs.rs/arrayvec/0.5/")] #![cfg_attr(not(feature="std"), no_std)] #![cfg_attr(feature="unstable-const-fn", feature(const_fn, const_maybe_uninit_assume_init))] #[cfg(feature="serde")] extern crate serde; #[cfg(not(feature="std"))] extern crate core as std; mod arrayvec_impl; mod arrayvec; mod array_string; mod char; mod errors; pub use crate::array_string::ArrayString; pub use crate::errors::CapacityError; pub use crate::arrayvec::{ArrayVec, IntoIter, Drain};
//! **arrayvec** provides the types `ArrayVec` and `ArrayString`: //! array-backed vector and string types, which store their contents inline. //! //! The arrayvec package has the following cargo features: //! //! - `std` //! - Optional, enabled by default //! - Use libstd; disable to use `no_std` instead. //! //! - `serde` //! - Optional //! - Enable serialization for ArrayVec and ArrayString using serde 1.x //! //! - `unstable-const-fn` //! - Optional //! - Makes [`ArrayVec::new`] and [`ArrayString::new`] `const fn`s, //! using the nightly `const_fn` feature. //! - Unstable and requires nightly. //! //! ## Rust Version //! //! This version of arrayvec requires Rust 1.51 or later. //! #![doc(html_root_url="https://docs.rs/arrayvec/0.5/")] #![cfg_attr(not(feature="std"), no_std)] #![cfg_attr(feature="unstable-const-fn", feature(const_fn, const_maybe_uninit_assume_init))] #[cfg(feature="serde")] extern crate serde; #[cfg(not(feature="std"))] extern crate core as std; mod arrayvec_impl; mod arrayvec; mod array_string; mod char; mod errors; pub use crate::array_string::ArrayString; pub use crate::errors::CapacityError; pub use crate::arrayvec::{ArrayVec, IntoIter, Drain};
1
1
1
mixed
--- a/src/lib.rs +++ b/src/lib.rs @@ -21,3 +21,3 @@ //! -//! This version of arrayvec requires Rust 1.36 or later. +//! This version of arrayvec requires Rust 1.51 or later. //!
--- a/src/lib.rs +++ b/src/lib.rs @@ ... @@ //! -//! This version of arrayvec requires Rust 1.36 or later. +//! This version of arrayvec requires Rust 1.51 or later. //!
--- a/src/lib.rs +++ b/src/lib.rs @@ -21,3 +21,3 @@ CON //! DEL //! This version of arrayvec requires Rust 1.36 or later. ADD //! This version of arrayvec requires Rust 1.51 or later. CON //!
<<<<<<< SEARCH //! ## Rust Version //! //! This version of arrayvec requires Rust 1.36 or later. //! #![doc(html_root_url="https://docs.rs/arrayvec/0.5/")] ======= //! ## Rust Version //! //! This version of arrayvec requires Rust 1.51 or later. //! #![doc(html_root_url="https://docs.rs/arrayvec/0.5/")] >>>>>>> REPLACE
magnusja/libaums
1d514078dac4c6d91ea3cdc4f4d3ce2a2ada4ecc
libaums/src/main/java/com/github/mjdev/libaums/driver/scsi/commands/ScsiReadCapacity.kt
kotlin
apache-2.0
READ CAPACITY(10) is long 10 not 0x10
/* * (C) Copyright 2014 mjahnen <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.github.mjdev.libaums.driver.scsi.commands import java.nio.ByteBuffer /** * Represents the command to read the capacity from the mass storage device. * * * The data is transferred in the data phase. * * @author mjahnen * @see com.github.mjdev.libaums.driver.scsi.commands.ScsiReadCapacityResponse */ class ScsiReadCapacity(lun: Byte) : CommandBlockWrapper(RESPONSE_LENGTH, Direction.IN, lun, LENGTH) { override fun serialize(buffer: ByteBuffer) { super.serialize(buffer) buffer.put(OPCODE) } companion object { private const val RESPONSE_LENGTH = 0x8 private const val LENGTH: Byte = 0x10 private const val OPCODE: Byte = 0x25 } }
/* * (C) Copyright 2014 mjahnen <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.github.mjdev.libaums.driver.scsi.commands import java.nio.ByteBuffer /** * Represents the command to read the capacity from the mass storage device. * * * The data is transferred in the data phase. * * @author mjahnen * @see com.github.mjdev.libaums.driver.scsi.commands.ScsiReadCapacityResponse */ class ScsiReadCapacity(lun: Byte) : CommandBlockWrapper(RESPONSE_LENGTH, Direction.IN, lun, LENGTH) { override fun serialize(buffer: ByteBuffer) { super.serialize(buffer) buffer.put(OPCODE) } companion object { private const val RESPONSE_LENGTH = 0x8 private const val LENGTH: Byte = 10 private const val OPCODE: Byte = 0x25 } }
1
1
1
mixed
--- a/libaums/src/main/java/com/github/mjdev/libaums/driver/scsi/commands/ScsiReadCapacity.kt +++ b/libaums/src/main/java/com/github/mjdev/libaums/driver/scsi/commands/ScsiReadCapacity.kt @@ -40,3 +40,3 @@ private const val RESPONSE_LENGTH = 0x8 - private const val LENGTH: Byte = 0x10 + private const val LENGTH: Byte = 10 private const val OPCODE: Byte = 0x25
--- a/libaums/src/main/java/com/github/mjdev/libaums/driver/scsi/commands/ScsiReadCapacity.kt +++ b/libaums/src/main/java/com/github/mjdev/libaums/driver/scsi/commands/ScsiReadCapacity.kt @@ ... @@ private const val RESPONSE_LENGTH = 0x8 - private const val LENGTH: Byte = 0x10 + private const val LENGTH: Byte = 10 private const val OPCODE: Byte = 0x25
--- a/libaums/src/main/java/com/github/mjdev/libaums/driver/scsi/commands/ScsiReadCapacity.kt +++ b/libaums/src/main/java/com/github/mjdev/libaums/driver/scsi/commands/ScsiReadCapacity.kt @@ -40,3 +40,3 @@ CON private const val RESPONSE_LENGTH = 0x8 DEL private const val LENGTH: Byte = 0x10 ADD private const val LENGTH: Byte = 10 CON private const val OPCODE: Byte = 0x25
<<<<<<< SEARCH private const val RESPONSE_LENGTH = 0x8 private const val LENGTH: Byte = 0x10 private const val OPCODE: Byte = 0x25 } ======= private const val RESPONSE_LENGTH = 0x8 private const val LENGTH: Byte = 10 private const val OPCODE: Byte = 0x25 } >>>>>>> REPLACE
uni-rs/uni.rs
4558c9d062e28ce114317b953a7f1256a8867441
src/net/eth/callbacks.rs
rust
mit
net/eth: Add definition of type EthernetCallbacks
//! Useful callbacks used by ethernet's specific filter
//! Useful callbacks used by ethernet's specific filter use boxed::Box; use net::Packet; use net::defs::{Rule, EthernetRule, EtherType, ETHERTYPE_IPV4, ETHERTYPE_IPV6}; use net::conn::filter::{SpecificCallbacks, GenericFilterTrait}; use net::ipv4::Ipv4GenericFilter; use super::defs::Header; /// Defines specific callbacks for ethernet protocol pub struct EthernetCallbacks; impl SpecificCallbacks<EtherType> for EthernetCallbacks { /// Create a network filter based on the ether type fn filter_from_generic_parameter(ether_type: EtherType) -> Option<Box<GenericFilterTrait>> { match ether_type { ETHERTYPE_IPV4 => Some(Box::new(Ipv4GenericFilter::new())), ETHERTYPE_IPV6 => unimplemented!(), _ => None, } } #[inline] /// Does the rule has a network rule component fn has_upper_filter(rule: &Rule) -> bool { rule.net_rule.is_some() } /// Set ethernet part of the rule with information gathered from the packet fn set_layer_rule(rule: &mut Rule, pkt: &Packet) { let hdr = pkt.link_header::<Header>().unwrap(); rule.eth_rule = Some(EthernetRule { ether_type: hdr.ether_type.as_host(), hw_in: Some(hdr.src.clone()), }); } }
42
0
1
add_only
--- a/src/net/eth/callbacks.rs
+++ b/src/net/eth/callbacks.rs
@@ -1 +1,43 @@
 //! Useful callbacks used by ethernet's specific filter
+
+use boxed::Box;
+
+use net::Packet;
+
+use net::defs::{Rule, EthernetRule, EtherType, ETHERTYPE_IPV4, ETHERTYPE_IPV6};
+
+use net::conn::filter::{SpecificCallbacks, GenericFilterTrait};
+
+use net::ipv4::Ipv4GenericFilter;
+
+use super::defs::Header;
+
+/// Defines specific callbacks for ethernet protocol
+pub struct EthernetCallbacks;
+
+impl SpecificCallbacks<EtherType> for EthernetCallbacks {
+    /// Create a network filter based on the ether type
+    fn filter_from_generic_parameter(ether_type: EtherType) -> Option<Box<GenericFilterTrait>> {
+        match ether_type {
+            ETHERTYPE_IPV4 => Some(Box::new(Ipv4GenericFilter::new())),
+            ETHERTYPE_IPV6 => unimplemented!(),
+            _ => None,
+        }
+    }
+
+    #[inline]
+    /// Does the rule has a network rule component
+    fn has_upper_filter(rule: &Rule) -> bool {
+        rule.net_rule.is_some()
+    }
+
+    /// Set ethernet part of the rule with information gathered from the packet
+    fn set_layer_rule(rule: &mut Rule, pkt: &Packet) {
+        let hdr = pkt.link_header::<Header>().unwrap();
+
+        rule.eth_rule = Some(EthernetRule {
+            ether_type: hdr.ether_type.as_host(),
+            hw_in: Some(hdr.src.clone()),
+        });
+    }
+}
--- a/src/net/eth/callbacks.rs
+++ b/src/net/eth/callbacks.rs
@@ ... @@
 //! Useful callbacks used by ethernet's specific filter
+
+use boxed::Box;
+
+use net::Packet;
+
+use net::defs::{Rule, EthernetRule, EtherType, ETHERTYPE_IPV4, ETHERTYPE_IPV6};
+
+use net::conn::filter::{SpecificCallbacks, GenericFilterTrait};
+
+use net::ipv4::Ipv4GenericFilter;
+
+use super::defs::Header;
+
+/// Defines specific callbacks for ethernet protocol
+pub struct EthernetCallbacks;
+
+impl SpecificCallbacks<EtherType> for EthernetCallbacks {
+    /// Create a network filter based on the ether type
+    fn filter_from_generic_parameter(ether_type: EtherType) -> Option<Box<GenericFilterTrait>> {
+        match ether_type {
+            ETHERTYPE_IPV4 => Some(Box::new(Ipv4GenericFilter::new())),
+            ETHERTYPE_IPV6 => unimplemented!(),
+            _ => None,
+        }
+    }
+
+    #[inline]
+    /// Does the rule has a network rule component
+    fn has_upper_filter(rule: &Rule) -> bool {
+        rule.net_rule.is_some()
+    }
+
+    /// Set ethernet part of the rule with information gathered from the packet
+    fn set_layer_rule(rule: &mut Rule, pkt: &Packet) {
+        let hdr = pkt.link_header::<Header>().unwrap();
+
+        rule.eth_rule = Some(EthernetRule {
+            ether_type: hdr.ether_type.as_host(),
+            hw_in: Some(hdr.src.clone()),
+        });
+    }
+}
--- a/src/net/eth/callbacks.rs
+++ b/src/net/eth/callbacks.rs
@@ -1 +1,43 @@
CON //! Useful callbacks used by ethernet's specific filter
ADD
ADD use boxed::Box;
ADD
ADD use net::Packet;
ADD
ADD use net::defs::{Rule, EthernetRule, EtherType, ETHERTYPE_IPV4, ETHERTYPE_IPV6};
ADD
ADD use net::conn::filter::{SpecificCallbacks, GenericFilterTrait};
ADD
ADD use net::ipv4::Ipv4GenericFilter;
ADD
ADD use super::defs::Header;
ADD
ADD /// Defines specific callbacks for ethernet protocol
ADD pub struct EthernetCallbacks;
ADD
ADD impl SpecificCallbacks<EtherType> for EthernetCallbacks {
ADD     /// Create a network filter based on the ether type
ADD     fn filter_from_generic_parameter(ether_type: EtherType) -> Option<Box<GenericFilterTrait>> {
ADD         match ether_type {
ADD             ETHERTYPE_IPV4 => Some(Box::new(Ipv4GenericFilter::new())),
ADD             ETHERTYPE_IPV6 => unimplemented!(),
ADD             _ => None,
ADD         }
ADD     }
ADD
ADD     #[inline]
ADD     /// Does the rule has a network rule component
ADD     fn has_upper_filter(rule: &Rule) -> bool {
ADD         rule.net_rule.is_some()
ADD     }
ADD
ADD     /// Set ethernet part of the rule with information gathered from the packet
ADD     fn set_layer_rule(rule: &mut Rule, pkt: &Packet) {
ADD         let hdr = pkt.link_header::<Header>().unwrap();
ADD
ADD         rule.eth_rule = Some(EthernetRule {
ADD             ether_type: hdr.ether_type.as_host(),
ADD             hw_in: Some(hdr.src.clone()),
ADD         });
ADD     }
ADD }
<<<<<<< SEARCH
//! Useful callbacks used by ethernet's specific filter
=======
//! Useful callbacks used by ethernet's specific filter

use boxed::Box;

use net::Packet;

use net::defs::{Rule, EthernetRule, EtherType, ETHERTYPE_IPV4, ETHERTYPE_IPV6};

use net::conn::filter::{SpecificCallbacks, GenericFilterTrait};

use net::ipv4::Ipv4GenericFilter;

use super::defs::Header;

/// Defines specific callbacks for ethernet protocol
pub struct EthernetCallbacks;

impl SpecificCallbacks<EtherType> for EthernetCallbacks {
    /// Create a network filter based on the ether type
    fn filter_from_generic_parameter(ether_type: EtherType) -> Option<Box<GenericFilterTrait>> {
        match ether_type {
            ETHERTYPE_IPV4 => Some(Box::new(Ipv4GenericFilter::new())),
            ETHERTYPE_IPV6 => unimplemented!(),
            _ => None,
        }
    }

    #[inline]
    /// Does the rule has a network rule component
    fn has_upper_filter(rule: &Rule) -> bool {
        rule.net_rule.is_some()
    }

    /// Set ethernet part of the rule with information gathered from the packet
    fn set_layer_rule(rule: &mut Rule, pkt: &Packet) {
        let hdr = pkt.link_header::<Header>().unwrap();

        rule.eth_rule = Some(EthernetRule {
            ether_type: hdr.ether_type.as_host(),
            hw_in: Some(hdr.src.clone()),
        });
    }
}
>>>>>>> REPLACE
onur/at_sirk
25a7847a1825ebee3c1386e55753783cf1d115b3
src/plugins/eksi.rs
rust
mit
Make it much more idiomatic
use at::{AtRust, AtTrigger};

use plugins::get;

use select::document::Document;

use select::predicate::{Attr, Class, Name};

use rand;


pub fn eksi(trigger: &AtTrigger, at: &AtRust) {
    let url = format!("https://eksisozluk.com/{}", trigger.command_message);
    let body = get(&url[..]);

    let body_str = body.unwrap();
    let document = Document::from_str(&body_str[..]);

    let mut entries: Vec<String> = Vec::new();

    for entry in document.find(Attr("id", "entry-list")).find(Name("li")).iter() {
        let entry_text = entry.find(Class("content")).first().unwrap().text();
        // FIXME: Do I really need to clone text?
        entries.push(entry_text.clone());
    }

    at.reply(trigger, &entries[rand::random::<usize>() % entries.len()][..]);
}
use at::{AtRust, AtTrigger};

use plugins::get;

use select::document::Document;

use select::predicate::{Attr, Class, Name};

use rand;


pub fn get_eksi(query: &str) -> Option<Vec<String>> {

    let url = format!("https://eksisozluk.com/{}", query);

    let body = match get(&url[..]) {
        Some(n) => n,
        None => return None
    };

    let document = Document::from_str(&body[..]);

    let mut entries: Vec<String> = Vec::new();

    for entry in document.find(Attr("id", "entry-list")).find(Name("li")).iter() {
        let entry_text = entry.find(Class("content")).first().unwrap().text();
        // FIXME: Do I really need to clone text?
        entries.push(entry_text.clone());
    }

    return match entries.len() {
        0 => None,
        _ => Some(entries)
    };

}


pub fn eksi(trigger: &AtTrigger, at: &AtRust) {

    let entries = get_eksi(&trigger.command_message[..]);

    match entries {
        Some(entries) => at.reply(trigger, &entries[rand::random::<usize>() % entries.len()][..]),
        None => {}
    }

}
26
6
2
mixed
--- a/src/plugins/eksi.rs
+++ b/src/plugins/eksi.rs
@@ -11,8 +11,12 @@
 
-pub fn eksi(trigger: &AtTrigger, at: &AtRust) {
-    let url = format!("https://eksisozluk.com/{}", trigger.command_message);
-    let body = get(&url[..]);
+pub fn get_eksi(query: &str) -> Option<Vec<String>> {
 
-    let body_str = body.unwrap();
-    let document = Document::from_str(&body_str[..]);
+    let url = format!("https://eksisozluk.com/{}", query);
+
+    let body = match get(&url[..]) {
+        Some(n) => n,
+        None => return None
+    };
+
+    let document = Document::from_str(&body[..]);
 
@@ -26,3 +30,19 @@
 
-    at.reply(trigger, &entries[rand::random::<usize>() % entries.len()][..]);
+    return match entries.len() {
+        0 => None,
+        _ => Some(entries)
+    };
+
 }
+
+
+pub fn eksi(trigger: &AtTrigger, at: &AtRust) {
+
+    let entries = get_eksi(&trigger.command_message[..]);
+
+    match entries {
+        Some(entries) => at.reply(trigger, &entries[rand::random::<usize>() % entries.len()][..]),
+        None => {}
+    }
+
+}
--- a/src/plugins/eksi.rs
+++ b/src/plugins/eksi.rs
@@ ... @@
 
-pub fn eksi(trigger: &AtTrigger, at: &AtRust) {
-    let url = format!("https://eksisozluk.com/{}", trigger.command_message);
-    let body = get(&url[..]);
+pub fn get_eksi(query: &str) -> Option<Vec<String>> {
 
-    let body_str = body.unwrap();
-    let document = Document::from_str(&body_str[..]);
+    let url = format!("https://eksisozluk.com/{}", query);
+
+    let body = match get(&url[..]) {
+        Some(n) => n,
+        None => return None
+    };
+
+    let document = Document::from_str(&body[..]);
 
@@ ... @@
 
-    at.reply(trigger, &entries[rand::random::<usize>() % entries.len()][..]);
+    return match entries.len() {
+        0 => None,
+        _ => Some(entries)
+    };
+
 }
+
+
+pub fn eksi(trigger: &AtTrigger, at: &AtRust) {
+
+    let entries = get_eksi(&trigger.command_message[..]);
+
+    match entries {
+        Some(entries) => at.reply(trigger, &entries[rand::random::<usize>() % entries.len()][..]),
+        None => {}
+    }
+
+}
--- a/src/plugins/eksi.rs
+++ b/src/plugins/eksi.rs
@@ -11,8 +11,12 @@
CON
DEL pub fn eksi(trigger: &AtTrigger, at: &AtRust) {
DEL     let url = format!("https://eksisozluk.com/{}", trigger.command_message);
DEL     let body = get(&url[..]);
ADD pub fn get_eksi(query: &str) -> Option<Vec<String>> {
CON
DEL     let body_str = body.unwrap();
DEL     let document = Document::from_str(&body_str[..]);
ADD     let url = format!("https://eksisozluk.com/{}", query);
ADD
ADD     let body = match get(&url[..]) {
ADD         Some(n) => n,
ADD         None => return None
ADD     };
ADD
ADD     let document = Document::from_str(&body[..]);
CON
@@ -26,3 +30,19 @@
CON
DEL     at.reply(trigger, &entries[rand::random::<usize>() % entries.len()][..]);
ADD     return match entries.len() {
ADD         0 => None,
ADD         _ => Some(entries)
ADD     };
ADD
CON }
ADD
ADD
ADD pub fn eksi(trigger: &AtTrigger, at: &AtRust) {
ADD
ADD     let entries = get_eksi(&trigger.command_message[..]);
ADD
ADD     match entries {
ADD         Some(entries) => at.reply(trigger, &entries[rand::random::<usize>() % entries.len()][..]),
ADD         None => {}
ADD     }
ADD
ADD }
<<<<<<< SEARCH

pub fn eksi(trigger: &AtTrigger, at: &AtRust) {
    let url = format!("https://eksisozluk.com/{}", trigger.command_message);
    let body = get(&url[..]);

    let body_str = body.unwrap();
    let document = Document::from_str(&body_str[..]);

    let mut entries: Vec<String> = Vec::new();
=======

pub fn get_eksi(query: &str) -> Option<Vec<String>> {

    let url = format!("https://eksisozluk.com/{}", query);

    let body = match get(&url[..]) {
        Some(n) => n,
        None => return None
    };

    let document = Document::from_str(&body[..]);

    let mut entries: Vec<String> = Vec::new();
>>>>>>> REPLACE
<<<<<<< SEARCH
    }

    at.reply(trigger, &entries[rand::random::<usize>() % entries.len()][..]);
}
=======
    }

    return match entries.len() {
        0 => None,
        _ => Some(entries)
    };

}


pub fn eksi(trigger: &AtTrigger, at: &AtRust) {

    let entries = get_eksi(&trigger.command_message[..]);

    match entries {
        Some(entries) => at.reply(trigger, &entries[rand::random::<usize>() % entries.len()][..]),
        None => {}
    }

}
>>>>>>> REPLACE
spacejam/sled
43746342dc764eb1723ff300c1d69f6857c7b1c8
src/concurrency_control.rs
rust
apache-2.0
Fix race condition in the CC upgrade path
use std::sync::atomic::AtomicBool;

use parking_lot::{RwLockReadGuard, RwLockWriteGuard};

use super::*;

#[derive(Default)]
pub(crate) struct ConcurrencyControl {
    necessary: AtomicBool,
    rw: RwLock<()>,
}

pub(crate) enum Protector<'a> {
    Write(RwLockWriteGuard<'a, ()>),
    Read(RwLockReadGuard<'a, ()>),
    None,
}

impl ConcurrencyControl {
    fn enable(&self) {
        if !self.necessary.swap(true, SeqCst) {
            // upgrade the system to using transactional
            // concurrency control, which is a little
            // more expensive for every operation.
            let (tx, rx) = std::sync::mpsc::channel();

            let guard = pin();
            guard.defer(move || tx.send(()).unwrap());
            guard.flush();
            drop(guard);

            rx.recv().unwrap();
        }
    }

    pub(crate) fn read<'a>(&'a self, _: &'a Guard) -> Protector<'a> {
        if self.necessary.load(SeqCst) {
            Protector::Read(self.rw.read())
        } else {
            Protector::None
        }
    }

    pub(crate) fn write(&self) -> Protector<'_> {
        self.enable();
        Protector::Write(self.rw.write())
    }
}
use std::sync::atomic::AtomicBool;

use parking_lot::{RwLockReadGuard, RwLockWriteGuard};

use super::*;

#[derive(Default)]
pub(crate) struct ConcurrencyControl {
    necessary: AtomicBool,
    upgrade_complete: AtomicBool,
    rw: RwLock<()>,
}

pub(crate) enum Protector<'a> {
    Write(RwLockWriteGuard<'a, ()>),
    Read(RwLockReadGuard<'a, ()>),
    None,
}

impl ConcurrencyControl {
    fn enable(&self) {
        if !self.necessary.load(Acquire) && !self.necessary.swap(true, SeqCst) {
            // upgrade the system to using transactional
            // concurrency control, which is a little
            // more expensive for every operation.
            let (tx, rx) = std::sync::mpsc::channel();

            let guard = pin();
            guard.defer(move || tx.send(()).unwrap());
            guard.flush();
            drop(guard);

            rx.recv().unwrap();
            self.upgrade_complete.store(true, Release);
        }
    }

    pub(crate) fn read<'a>(&'a self, _: &'a Guard) -> Protector<'a> {
        if self.necessary.load(Acquire) {
            Protector::Read(self.rw.read())
        } else {
            Protector::None
        }
    }

    pub(crate) fn write(&self) -> Protector<'_> {
        self.enable();
        while !self.upgrade_complete.load(Acquire) {
            std::sync::atomic::spin_loop_hint()
        }
        Protector::Write(self.rw.write())
    }
}
7
2
5
mixed
--- a/src/concurrency_control.rs
+++ b/src/concurrency_control.rs
@@ -9,2 +9,3 @@
     necessary: AtomicBool,
+    upgrade_complete: AtomicBool,
     rw: RwLock<()>,
@@ -20,3 +21,3 @@
     fn enable(&self) {
-        if !self.necessary.swap(true, SeqCst) {
+        if !self.necessary.load(Acquire) && !self.necessary.swap(true, SeqCst) {
             // upgrade the system to using transactional
@@ -32,2 +33,3 @@
             rx.recv().unwrap();
+            self.upgrade_complete.store(true, Release);
         }
@@ -36,3 +38,3 @@
     pub(crate) fn read<'a>(&'a self, _: &'a Guard) -> Protector<'a> {
-        if self.necessary.load(SeqCst) {
+        if self.necessary.load(Acquire) {
             Protector::Read(self.rw.read())
@@ -45,2 +47,5 @@
         self.enable();
+        while !self.upgrade_complete.load(Acquire) {
+            std::sync::atomic::spin_loop_hint()
+        }
         Protector::Write(self.rw.write())
--- a/src/concurrency_control.rs
+++ b/src/concurrency_control.rs
@@ ... @@
     necessary: AtomicBool,
+    upgrade_complete: AtomicBool,
     rw: RwLock<()>,
@@ ... @@
     fn enable(&self) {
-        if !self.necessary.swap(true, SeqCst) {
+        if !self.necessary.load(Acquire) && !self.necessary.swap(true, SeqCst) {
             // upgrade the system to using transactional
@@ ... @@
             rx.recv().unwrap();
+            self.upgrade_complete.store(true, Release);
         }
@@ ... @@
     pub(crate) fn read<'a>(&'a self, _: &'a Guard) -> Protector<'a> {
-        if self.necessary.load(SeqCst) {
+        if self.necessary.load(Acquire) {
             Protector::Read(self.rw.read())
@@ ... @@
         self.enable();
+        while !self.upgrade_complete.load(Acquire) {
+            std::sync::atomic::spin_loop_hint()
+        }
         Protector::Write(self.rw.write())
--- a/src/concurrency_control.rs
+++ b/src/concurrency_control.rs
@@ -9,2 +9,3 @@
CON     necessary: AtomicBool,
ADD     upgrade_complete: AtomicBool,
CON     rw: RwLock<()>,
@@ -20,3 +21,3 @@
CON     fn enable(&self) {
DEL         if !self.necessary.swap(true, SeqCst) {
ADD         if !self.necessary.load(Acquire) && !self.necessary.swap(true, SeqCst) {
CON             // upgrade the system to using transactional
@@ -32,2 +33,3 @@
CON             rx.recv().unwrap();
ADD             self.upgrade_complete.store(true, Release);
CON         }
@@ -36,3 +38,3 @@
CON     pub(crate) fn read<'a>(&'a self, _: &'a Guard) -> Protector<'a> {
DEL         if self.necessary.load(SeqCst) {
ADD         if self.necessary.load(Acquire) {
CON             Protector::Read(self.rw.read())
@@ -45,2 +47,5 @@
CON         self.enable();
ADD         while !self.upgrade_complete.load(Acquire) {
ADD             std::sync::atomic::spin_loop_hint()
ADD         }
CON         Protector::Write(self.rw.write())
<<<<<<< SEARCH
pub(crate) struct ConcurrencyControl {
    necessary: AtomicBool,
    rw: RwLock<()>,
}
=======
pub(crate) struct ConcurrencyControl {
    necessary: AtomicBool,
    upgrade_complete: AtomicBool,
    rw: RwLock<()>,
}
>>>>>>> REPLACE
<<<<<<< SEARCH
impl ConcurrencyControl {
    fn enable(&self) {
        if !self.necessary.swap(true, SeqCst) {
            // upgrade the system to using transactional
            // concurrency control, which is a little
=======
impl ConcurrencyControl {
    fn enable(&self) {
        if !self.necessary.load(Acquire) && !self.necessary.swap(true, SeqCst) {
            // upgrade the system to using transactional
            // concurrency control, which is a little
>>>>>>> REPLACE
<<<<<<< SEARCH
            rx.recv().unwrap();
        }
    }

    pub(crate) fn read<'a>(&'a self, _: &'a Guard) -> Protector<'a> {
        if self.necessary.load(SeqCst) {
            Protector::Read(self.rw.read())
        } else {
=======
            rx.recv().unwrap();
            self.upgrade_complete.store(true, Release);
        }
    }

    pub(crate) fn read<'a>(&'a self, _: &'a Guard) -> Protector<'a> {
        if self.necessary.load(Acquire) {
            Protector::Read(self.rw.read())
        } else {
>>>>>>> REPLACE
<<<<<<< SEARCH
    pub(crate) fn write(&self) -> Protector<'_> {
        self.enable();
        Protector::Write(self.rw.write())
    }
=======
    pub(crate) fn write(&self) -> Protector<'_> {
        self.enable();
        while !self.upgrade_complete.load(Acquire) {
            std::sync::atomic::spin_loop_hint()
        }
        Protector::Write(self.rw.write())
    }
>>>>>>> REPLACE
pypa/setuptools
c105d6f18a5a17b0a47fda5a2df2f8f47352b037
setuptools/command/upload.py
python
mit
Simplify logic by eliminating retries in password prompt and returning results directly.
import getpass
from distutils.command import upload as orig


class upload(orig.upload):
    """
    Override default upload behavior to obtain password
    in a variety of different ways.
    """

    def finalize_options(self):
        orig.upload.finalize_options(self)
        # Attempt to obtain password. Short circuit evaluation at the first
        # sign of success.
        self.password = (
            self.password or
            self._load_password_from_keyring() or
            self._prompt_for_password()
        )

    def _load_password_from_keyring(self):
        """
        Attempt to load password from keyring. Suppress Exceptions.
        """
        try:
            keyring = __import__('keyring')
            password = keyring.get_password(self.repository, self.username)
        except Exception:
            password = None
        finally:
            return password

    def _prompt_for_password(self):
        """
        Prompt for a password on the tty. Suppress Exceptions.
        """
        password = None
        try:
            while not password:
                password = getpass.getpass()
        except (Exception, KeyboardInterrupt):
            password = None
        finally:
            return password
import getpass
from distutils.command import upload as orig


class upload(orig.upload):
    """
    Override default upload behavior to obtain password
    in a variety of different ways.
    """

    def finalize_options(self):
        orig.upload.finalize_options(self)
        # Attempt to obtain password. Short circuit evaluation at the first
        # sign of success.
        self.password = (
            self.password or
            self._load_password_from_keyring() or
            self._prompt_for_password()
        )

    def _load_password_from_keyring(self):
        """
        Attempt to load password from keyring. Suppress Exceptions.
        """
        try:
            keyring = __import__('keyring')
            return keyring.get_password(self.repository, self.username)
        except Exception:
            pass

    def _prompt_for_password(self):
        """
        Prompt for a password on the tty. Suppress Exceptions.
        """
        try:
            return getpass.getpass()
        except (Exception, KeyboardInterrupt):
            pass
4
10
2
mixed
--- a/setuptools/command/upload.py
+++ b/setuptools/command/upload.py
@@ -26,7 +26,5 @@
             keyring = __import__('keyring')
-            password = keyring.get_password(self.repository, self.username)
+            return keyring.get_password(self.repository, self.username)
         except Exception:
-            password = None
-        finally:
-            return password
+            pass
 
@@ -36,9 +34,5 @@
         """
-        password = None
         try:
-            while not password:
-                password = getpass.getpass()
+            return getpass.getpass()
         except (Exception, KeyboardInterrupt):
-            password = None
-        finally:
-            return password
+            pass
--- a/setuptools/command/upload.py
+++ b/setuptools/command/upload.py
@@ ... @@
             keyring = __import__('keyring')
-            password = keyring.get_password(self.repository, self.username)
+            return keyring.get_password(self.repository, self.username)
         except Exception:
-            password = None
-        finally:
-            return password
+            pass
 
@@ ... @@
         """
-        password = None
         try:
-            while not password:
-                password = getpass.getpass()
+            return getpass.getpass()
         except (Exception, KeyboardInterrupt):
-            password = None
-        finally:
-            return password
+            pass
--- a/setuptools/command/upload.py
+++ b/setuptools/command/upload.py
@@ -26,7 +26,5 @@
CON             keyring = __import__('keyring')
DEL             password = keyring.get_password(self.repository, self.username)
ADD             return keyring.get_password(self.repository, self.username)
CON         except Exception:
DEL             password = None
DEL         finally:
DEL             return password
ADD             pass
CON
@@ -36,9 +34,5 @@
CON         """
DEL         password = None
CON         try:
DEL             while not password:
DEL                 password = getpass.getpass()
ADD             return getpass.getpass()
CON         except (Exception, KeyboardInterrupt):
DEL             password = None
DEL         finally:
DEL             return password
ADD             pass
<<<<<<< SEARCH
        try:
            keyring = __import__('keyring')
            password = keyring.get_password(self.repository, self.username)
        except Exception:
            password = None
        finally:
            return password

    def _prompt_for_password(self):
        """
        Prompt for a password on the tty. Suppress Exceptions.
        """
        password = None
        try:
            while not password:
                password = getpass.getpass()
        except (Exception, KeyboardInterrupt):
            password = None
        finally:
            return password
=======
        try:
            keyring = __import__('keyring')
            return keyring.get_password(self.repository, self.username)
        except Exception:
            pass

    def _prompt_for_password(self):
        """
        Prompt for a password on the tty. Suppress Exceptions.
        """
        try:
            return getpass.getpass()
        except (Exception, KeyboardInterrupt):
            pass
>>>>>>> REPLACE
sake/bouncycastle-java
87b10ebb6399c8e9ee1984b32337a358001a4baf
src/org/bouncycastle/crypto/tls/TlsOuputStream.java
java
mit
Add cose() back as a deprecated method for backward compatibility
package org.bouncycastle.crypto.tls;

import java.io.IOException;
import java.io.OutputStream;


/**
 * An OutputStream for an TLS connection.
 */
public class TlsOuputStream extends OutputStream
{
    private TlsProtocolHandler handler;

    protected TlsOuputStream(TlsProtocolHandler handler)
    {
        this.handler = handler;
    }

    public void write(byte buf[], int offset, int len) throws IOException
    {
        this.handler.writeData(buf, offset, len);
    }

    public void write(int arg0) throws IOException
    {
        byte[] buf = new byte[1];
        buf[0] = (byte)arg0;
        this.write(buf, 0, 1);
    }


    public void close() throws IOException
    {
        handler.close();
    }

    public void flush() throws IOException
    {
        handler.flush();
    }
}
package org.bouncycastle.crypto.tls;

import java.io.IOException;
import java.io.OutputStream;


/**
 * An OutputStream for an TLS connection.
 */
public class TlsOuputStream extends OutputStream
{
    private TlsProtocolHandler handler;

    protected TlsOuputStream(TlsProtocolHandler handler)
    {
        this.handler = handler;
    }

    public void write(byte buf[], int offset, int len) throws IOException
    {
        this.handler.writeData(buf, offset, len);
    }

    public void write(int arg0) throws IOException
    {
        byte[] buf = new byte[1];
        buf[0] = (byte)arg0;
        this.write(buf, 0, 1);
    }

    /** @deprecated Use 'close' instead */
    public void cose() throws IOException
    {
        handler.close();
    }

    public void close() throws IOException
    {
        handler.close();
    }

    public void flush() throws IOException
    {
        handler.flush();
    }
}
5
0
1
add_only
--- a/src/org/bouncycastle/crypto/tls/TlsOuputStream.java
+++ b/src/org/bouncycastle/crypto/tls/TlsOuputStream.java
@@ -30,2 +30,7 @@
 
+    /** @deprecated Use 'close' instead */
+    public void cose() throws IOException
+    {
+        handler.close();
+    }
 
--- a/src/org/bouncycastle/crypto/tls/TlsOuputStream.java
+++ b/src/org/bouncycastle/crypto/tls/TlsOuputStream.java
@@ ... @@
 
+    /** @deprecated Use 'close' instead */
+    public void cose() throws IOException
+    {
+        handler.close();
+    }
 
--- a/src/org/bouncycastle/crypto/tls/TlsOuputStream.java
+++ b/src/org/bouncycastle/crypto/tls/TlsOuputStream.java
@@ -30,2 +30,7 @@
CON
ADD     /** @deprecated Use 'close' instead */
ADD     public void cose() throws IOException
ADD     {
ADD         handler.close();
ADD     }
CON
<<<<<<< SEARCH
    }


    public void close() throws IOException
=======
    }

    /** @deprecated Use 'close' instead */
    public void cose() throws IOException
    {
        handler.close();
    }

    public void close() throws IOException
>>>>>>> REPLACE
fajran/tempel
9d23940c430a4f95ec11b33362141ec2ffc3f533
src/tempel/models.py
python
agpl-3.0
Add is_editable and done_editable functions to Entry
from datetime import datetime, timedelta

from django.db import models
from django.conf import settings

from tempel import utils


def default_edit_expires():
    return datetime.now() + timedelta(seconds=60*settings.TEMPEL_EDIT_AGE)


class Entry(models.Model):
    content = models.TextField()
    language = models.CharField(max_length=20, choices=utils.get_languages())
    created = models.DateTimeField(default=datetime.now)
    active = models.BooleanField(default=True)
    edit_token = models.CharField(max_length=8, default=utils.create_token, null=True)
    edit_expires = models.DateTimeField(default=default_edit_expires, null=True)

    class Meta:
        ordering = ['-created']
        verbose_name_plural = "entries"

    def get_language(self):
        return utils.get_language(self.language)

    def get_mimetype(self):
        return utils.get_mimetype(self.language)

    def get_filename(self):
        return '%s.%s' % (self.id, self.get_extension())

    def get_extension(self):
        return utils.get_extension(self.language)

    def __unicode__(self):
        return '<Entry: id=%s lang=%s>' % (self.id, self.language)
from datetime import datetime, timedelta

from django.db import models
from django.conf import settings

from tempel import utils


def default_edit_expires():
    return datetime.now() + timedelta(seconds=60*settings.TEMPEL_EDIT_AGE)


class Entry(models.Model):
    content = models.TextField()
    language = models.CharField(max_length=20, choices=utils.get_languages())
    created = models.DateTimeField(default=datetime.now)
    active = models.BooleanField(default=True)
    edit_token = models.CharField(max_length=8, default=utils.create_token, null=True)
    edit_expires = models.DateTimeField(default=default_edit_expires, null=True)

    class Meta:
        ordering = ['-created']
        verbose_name_plural = "entries"

    def get_language(self):
        return utils.get_language(self.language)

    def get_mimetype(self):
        return utils.get_mimetype(self.language)

    def get_filename(self):
        return '%s.%s' % (self.id, self.get_extension())

    def get_extension(self):
        return utils.get_extension(self.language)

    def done_editable(self):
        self.edit_token = None
        self.save()

    def is_editable(self, token):
        time_ok = self.edit_expires is not None and self.edit_expires >= datetime.now()
        token_ok = token == self.edit_token
        return time_ok and time_ok

    def __unicode__(self):
        return '<Entry: id=%s lang=%s>' % (self.id, self.language)
9
0
1
add_only
--- a/src/tempel/models.py
+++ b/src/tempel/models.py
@@ -36,2 +36,11 @@
 
+    def done_editable(self):
+        self.edit_token = None
+        self.save()
+
+    def is_editable(self, token):
+        time_ok = self.edit_expires is not None and self.edit_expires >= datetime.now()
+        token_ok = token == self.edit_token
+        return time_ok and time_ok
+
     def __unicode__(self):
--- a/src/tempel/models.py
+++ b/src/tempel/models.py
@@ ... @@
 
+    def done_editable(self):
+        self.edit_token = None
+        self.save()
+
+    def is_editable(self, token):
+        time_ok = self.edit_expires is not None and self.edit_expires >= datetime.now()
+        token_ok = token == self.edit_token
+        return time_ok and time_ok
+
     def __unicode__(self):
--- a/src/tempel/models.py
+++ b/src/tempel/models.py
@@ -36,2 +36,11 @@
CON
ADD     def done_editable(self):
ADD         self.edit_token = None
ADD         self.save()
ADD
ADD     def is_editable(self, token):
ADD         time_ok = self.edit_expires is not None and self.edit_expires >= datetime.now()
ADD         token_ok = token == self.edit_token
ADD         return time_ok and time_ok
ADD
CON     def __unicode__(self):
<<<<<<< SEARCH
        return utils.get_extension(self.language)

    def __unicode__(self):
        return '<Entry: id=%s lang=%s>' % (self.id, self.language)
=======
        return utils.get_extension(self.language)

    def done_editable(self):
        self.edit_token = None
        self.save()

    def is_editable(self, token):
        time_ok = self.edit_expires is not None and self.edit_expires >= datetime.now()
        token_ok = token == self.edit_token
        return time_ok and time_ok

    def __unicode__(self):
        return '<Entry: id=%s lang=%s>' % (self.id, self.language)
>>>>>>> REPLACE
CVBDL/EagleEye-App
c9c976ff2efc6b2f5c71f1ea3dccd35eb565fcbe
app/scripts/controllers/ChartSetCreationController.js
javascript
mit
Create chart set api updated
'use strict';

/**
 * @ngdoc function
 * @name eagleeye.controller:ChartSetCreationController
 * @description
 * # ChartSetCreationController
 * Controller of the eagleeye
 */
angular.module('eagleeye')
  .controller('ChartSetCreationController', [
    '$state',
    'EagleEyeWebService',
    function ($state, EagleEyeWebService) {
      var friendlyUrlPrefix = 's-',
        controller = this;

      EagleEyeWebService.getCharts().then(function(chartList) {
        controller.chartList = chartList;
      });

      this.settings = {
        title: '',
        description: '',
        friendlyUrl: '',
        charts: []
      };

      this.addToChartSet = function(chart) {
        if (this.settings.charts.indexOf(chart._id) < 0) {
          this.settings.charts.push(chart._id);
        }
      };

      this.save = function() {
        var data = JSON.stringify(this.settings);

        EagleEyeWebService.createChartSet(data).then(function(newChartSetId) {
          $state.go('chartSet', {
            id: newChartSetId
          });
        });
      };
    }
  ]);
'use strict';

/**
 * @ngdoc function
 * @name eagleeye.controller:ChartSetCreationController
 * @description
 * # ChartSetCreationController
 * Controller of the eagleeye
 */
angular.module('eagleeye')
  .controller('ChartSetCreationController', [
    '$state',
    'EagleEyeWebService',
    function ($state, EagleEyeWebService) {
      var friendlyUrlPrefix = 's-',
        controller = this;

      EagleEyeWebService.getCharts().then(function(chartList) {
        controller.chartList = chartList;
      });

      this.settings = {
        title: '',
        description: '',
        friendlyUrl: '',
        charts: []
      };

      this.addToChartSet = function(chart) {
        if (this.settings.charts.indexOf(chart._id) < 0) {
          this.settings.charts.push(chart._id);
        }
      };

      this.save = function() {
        var data = JSON.stringify(this.settings);

        EagleEyeWebService.createChartSet(data).then(function(newChartSet) {
          $state.go('chartSet', {
            id: newChartSet._id
          });
        });
      };
    }
  ]);
2
2
1
mixed
--- a/app/scripts/controllers/ChartSetCreationController.js
+++ b/app/scripts/controllers/ChartSetCreationController.js
@@ -37,5 +37,5 @@
 
-        EagleEyeWebService.createChartSet(data).then(function(newChartSetId) {
+        EagleEyeWebService.createChartSet(data).then(function(newChartSet) {
           $state.go('chartSet', {
-            id: newChartSetId
+            id: newChartSet._id
           });
--- a/app/scripts/controllers/ChartSetCreationController.js
+++ b/app/scripts/controllers/ChartSetCreationController.js
@@ ... @@
 
-        EagleEyeWebService.createChartSet(data).then(function(newChartSetId) {
+        EagleEyeWebService.createChartSet(data).then(function(newChartSet) {
           $state.go('chartSet', {
-            id: newChartSetId
+            id: newChartSet._id
           });
--- a/app/scripts/controllers/ChartSetCreationController.js
+++ b/app/scripts/controllers/ChartSetCreationController.js
@@ -37,5 +37,5 @@
CON
DEL         EagleEyeWebService.createChartSet(data).then(function(newChartSetId) {
ADD         EagleEyeWebService.createChartSet(data).then(function(newChartSet) {
CON           $state.go('chartSet', {
DEL             id: newChartSetId
ADD             id: newChartSet._id
CON           });
<<<<<<< SEARCH
        var data = JSON.stringify(this.settings);

        EagleEyeWebService.createChartSet(data).then(function(newChartSetId) {
          $state.go('chartSet', {
            id: newChartSetId
          });
        });
=======
        var data = JSON.stringify(this.settings);

        EagleEyeWebService.createChartSet(data).then(function(newChartSet) {
          $state.go('chartSet', {
            id: newChartSet._id
          });
        });
>>>>>>> REPLACE