text: string, lengths 16 to 4.96k
positive: string, lengths 321 to 2.24k
negative: string, lengths 310 to 2.21k
Add slug variable to pass in the URL
from django.views.generic import ListView, DetailView from .models import Post class PostListView(ListView): context_object_name = 'posts' model = Post template_name = 'hermes/post_list.html' def get_queryset(self): return self.model.objects.order_by('created_on') class CategoryPostListView(PostListView): slug = None def get_queryset(self): category_slug = self.kwargs.get('slug', '') return self.model.objects.in_category(category_slug) class ArchivePostListView(PostListView): def get_queryset(self): year = self.kwargs.get('year', None) month = self.kwargs.get('month', None) day = self.kwargs.get('day', None) return self.model.objects.created_on(year=year, month=month, day=day) class PostDetail(DetailView): context_object_name = 'post' model = Post template_name = "hermes/post_detail.html"
from django.views.generic import ListView, DetailView from .models import Post class PostListView(ListView): context_object_name = 'posts' model = Post template_name = 'hermes/post_list.html' def get_queryset(self): return self.model.objects.order_by('created_on') class CategoryPostListView(PostListView): def get_queryset(self): category_slug = self.kwargs.get('slug', '') return self.model.objects.in_category(category_slug) class ArchivePostListView(PostListView): def get_queryset(self): year = self.kwargs.get('year', None) month = self.kwargs.get('month', None) day = self.kwargs.get('day', None) return self.model.objects.created_on(year=year, month=month, day=day) class PostDetail(DetailView): context_object_name = 'post' model = Post template_name = "hermes/post_detail.html"
Fix the spawning bug with autogenerated levels
package uk.co.alynn.games.ld30; import java.util.List; import uk.co.alynn.games.ld30.world.Planet; import uk.co.alynn.games.ld30.world.Adversary; public class SuperimposedWave extends Wave { private final Wave m_left; private final Wave m_right; public SuperimposedWave(Wave left, Wave right) { m_left = left; m_right = right; } @Override public boolean dispatch(List<Planet> planets, List<Adversary> adversaries, int tickID) { boolean ok = true; ok = m_left.dispatch(planets, adversaries, tickID) && ok; ok = m_right.dispatch(planets, adversaries, tickID) && ok; return ok; } @Override protected boolean tick(int tick) { throw new RuntimeException("Cannot tick a superwave"); } }
package uk.co.alynn.games.ld30; import java.util.List; import uk.co.alynn.games.ld30.world.Planet; import uk.co.alynn.games.ld30.world.Adversary; public class SuperimposedWave extends Wave { private final Wave m_left; private final Wave m_right; public SuperimposedWave(Wave left, Wave right) { m_left = left; m_right = right; } @Override public boolean dispatch(List<Planet> planets, List<Adversary> adversaries, int tickID) { boolean ok = true; ok = ok && m_left.dispatch(planets, adversaries, tickID); ok = ok && m_right.dispatch(planets, adversaries, tickID); return ok; } @Override protected boolean tick(int tick) { throw new RuntimeException("Cannot tick a superwave"); } }
Add a Python 3 classifier recommended by community
# # This is the regobj setuptools script. # Originally developed by Ryan Kelly, 2009. # # This script is placed in the public domain. # from distutils.core import setup # If we did a straight `import regobj` here we wouldn't be able # to build on non-win32 machines. regobj = {} try: execfile("regobj.py",regobj) except ImportError: pass VERSION = regobj["__version__"] NAME = "regobj" DESCRIPTION = "Pythonic object-based access to the Windows Registry." LONG_DESC = regobj["__doc__"] AUTHOR = "Ryan Kelly" AUTHOR_EMAIL = "ryan@rfk.id.au" URL="https://github.com/rfk/regobj" LICENSE = "MIT" KEYWORDS = "windows registry" setup(name=NAME, version=VERSION, author=AUTHOR, author_email=AUTHOR_EMAIL, url=URL, description=DESCRIPTION, long_description=LONG_DESC, license=LICENSE, keywords=KEYWORDS, py_modules=["regobj"], classifiers=[c.strip() for c in """ Intended Audience :: Developers License :: OSI Approved :: MIT License Programming Language :: Python :: 2 Programming Language :: Python :: 3 Topic :: Software Development :: Libraries :: Python Modules """.split('\n') if c.strip()], )
# # This is the regobj setuptools script. # Originally developed by Ryan Kelly, 2009. # # This script is placed in the public domain. # from distutils.core import setup # If we did a straight `import regobj` here we wouldn't be able # to build on non-win32 machines. regobj = {} try: execfile("regobj.py",regobj) except ImportError: pass VERSION = regobj["__version__"] NAME = "regobj" DESCRIPTION = "Pythonic object-based access to the Windows Registry." LONG_DESC = regobj["__doc__"] AUTHOR = "Ryan Kelly" AUTHOR_EMAIL = "ryan@rfk.id.au" URL="https://github.com/rfk/regobj" LICENSE = "MIT" KEYWORDS = "windows registry" setup(name=NAME, version=VERSION, author=AUTHOR, author_email=AUTHOR_EMAIL, url=URL, description=DESCRIPTION, long_description=LONG_DESC, license=LICENSE, keywords=KEYWORDS, py_modules=["regobj"], )
Fix composer for inplace in dev mode
<?php require_once __DIR__ . '/deployer/recipe/yii2-app-advanced.php'; require_once __DIR__ . '/deployer/recipe/yii-configure.php'; require_once __DIR__ . '/deployer/recipe/in-place.php'; if (!file_exists (__DIR__ . '/deployer/stage/servers.yml')) { die('Please create "' . __DIR__ . '/deployer/stage/servers.yml" before continuing.' . "\n"); } serverList(__DIR__ . '/deployer/stage/servers.yml'); set('repository', '{{repository}}'); set('default_stage', 'production'); set('keep_releases', 2); set('writable_use_sudo', false); // Using sudo in writable commands? task('deploy:configure_composer', function () { $stage = env('app.stage'); if($stage == 'dev') { env('composer_options', 'install --verbose --no-progress --no-interaction'); } })->desc('Configure composer'); // uncomment the next two lines to run migrations //after('deploy:symlink', 'deploy:run_migrations'); //after('inplace:configure', 'inplace:run_migrations'); before('deploy:vendors', 'deploy:configure_composer'); before('inplace:vendors', 'deploy:configure_composer'); before('deploy:symlink', 'deploy:configure');
<?php require_once __DIR__ . '/deployer/recipe/yii2-app-advanced.php'; require_once __DIR__ . '/deployer/recipe/yii-configure.php'; require_once __DIR__ . '/deployer/recipe/in-place.php'; if (!file_exists (__DIR__ . '/deployer/stage/servers.yml')) { die('Please create "' . __DIR__ . '/deployer/stage/servers.yml" before continuing.' . "\n"); } serverList(__DIR__ . '/deployer/stage/servers.yml'); set('repository', '{{repository}}'); set('default_stage', 'production'); set('keep_releases', 2); set('writable_use_sudo', false); // Using sudo in writable commands? task('deploy:configure_composer', function () { $stage = env('app.stage'); if($stage == 'dev') { env('composer_options', 'install --verbose --no-progress --no-interaction'); } })->desc('Configure composer'); // uncomment the next two lines to run migrations //after('deploy:symlink', 'deploy:run_migrations'); //after('inplace:configure', 'inplace:run_migrations'); before('deploy:vendors', 'deploy:configure_composer'); before('deploy:symlink', 'deploy:configure');
Update implicit default for view layout
/** * getImplicitDefaults() * * Get a dictionary of implicit defaults this hook would like to merge * into `sails.config` when Sails is loaded. * * @param {Dictionary} existingConfig * Existing configuration which has already been loaded * e.g. the Sails app path, and any config overrides (programmtic, from .sailsrc, etc) * * @returns {Dictionary} */ module.exports = function getImplicitDefaults (existingConfig) { return { views: { // Extension for view files extension: 'ejs', // Layout is on by default, in the top level of the view directory // false === don't use a layout // string === path to layout (absolute or relative to views directory), without extension layout: 'layout' }, paths: { views: existingConfig.appPath + '/views', layout: existingConfig.appPath + '/views/layout.ejs' } }; };
/** * getImplicitDefaults() * * Get a dictionary of implicit defaults this hook would like to merge * into `sails.config` when Sails is loaded. * * @param {Dictionary} existingConfig * Existing configuration which has already been loaded * e.g. the Sails app path, and any config overrides (programmtic, from .sailsrc, etc) * * @returns {Dictionary} */ module.exports = function getImplicitDefaults (existingConfig) { return { views: { // Extension for view files extension: 'ejs', // Layout is on by default, in the top level of the view directory // true === use default // false === don't use a layout // string === path to layout layout: true }, paths: { views: existingConfig.appPath + '/views', layout: existingConfig.appPath + '/views/layout.ejs' } }; };
Use errors.New() instead of fmt.Errorf() where appropriate.
package armoredcrypt import ( "fmt" "encoding/base64" "strings" "errors" ) const ( _MAGIC_PREFIX = "saltybox" _V1_MAGIC = "saltybox1:" ) func Wrap(body []byte) string { encoded := base64.RawURLEncoding.EncodeToString(body) return fmt.Sprintf("%s%s", _V1_MAGIC, encoded) } func Unwrap(varmoredBody string) ([]byte, error) { if len(varmoredBody) < len(_V1_MAGIC) { return nil, errors.New("input size smaller than magic marker; likely truncated") } if strings.HasPrefix(varmoredBody, _V1_MAGIC) { armoredBody := strings.TrimPrefix(varmoredBody, _V1_MAGIC) body, err := base64.RawURLEncoding.DecodeString(armoredBody) if err != nil { return nil, fmt.Errorf("base64 decoding failed: %s", err) } return body, nil } else if strings.HasPrefix(varmoredBody, _MAGIC_PREFIX) { return nil, errors.New("input claims to be saltybox, but not a version we support") } else { return nil, errors.New("input unrecognized as saltybox data") } }
package armoredcrypt import ( "fmt" "encoding/base64" "strings" ) const ( _MAGIC_PREFIX = "saltybox" _V1_MAGIC = "saltybox1:" ) func Wrap(body []byte) string { encoded := base64.RawURLEncoding.EncodeToString(body) return fmt.Sprintf("%s%s", _V1_MAGIC, encoded) } func Unwrap(varmoredBody string) ([]byte, error) { if len(varmoredBody) < len(_V1_MAGIC) { return nil, fmt.Errorf("input size smaller than magic marker; likely truncated") } if strings.HasPrefix(varmoredBody, _V1_MAGIC) { armoredBody := strings.TrimPrefix(varmoredBody, _V1_MAGIC) body, err := base64.RawURLEncoding.DecodeString(armoredBody) if err != nil { return nil, fmt.Errorf("base64 decoding failed: %s", err) } return body, nil } else if strings.HasPrefix(varmoredBody, _MAGIC_PREFIX) { return nil, fmt.Errorf("input claims to be saltybox, but not a version we support") } else { return nil, fmt.Errorf("input unrecognized as saltybox data") } }
:bug: Use defaultColors if config not provided References #9
'use strict'; const foregroundColor = '#A1B0B8'; const borderColor = '#252525'; const oceanicColors = require('./colorschemes/oceanic.js'); const defaultColors = require('./colorschemes/regular.js'); const getColorScheme = cfg => { if (cfg.materialshell) { switch (cfg.materialshell.theme) { case 'oceanic': return oceanicColors; case 'dark': return defaultColors; default: return defaultColors; } } }; exports.decorateConfig = config => { const colors = getColorScheme(config) || defaultColors; const backgroundColor = colors.background; return Object.assign({}, config, { cursorColor: colors.palette.red, cursorShape: 'UNDERLINE', foregroundColor, backgroundColor, borderColor, css: `${config.css || ''} .tab_tab:before {border-left: 1px solid;} .tab_active {background: rgba(255,255,255,0.05);} .tab_active:before {border-color: ${colors.palette.red};} `, colors: colors.palette }); };
'use strict'; const foregroundColor = '#A1B0B8'; const borderColor = '#252525'; const oceanicColors = require('./colorschemes/oceanic.js'); const defaultColors = require('./colorschemes/regular.js'); const getColorScheme = cfg => { if (cfg.materialshell) { switch (cfg.materialshell.theme) { case 'oceanic': return oceanicColors; case 'dark': return defaultColors; default: return defaultColors; } } }; exports.decorateConfig = config => { const colors = getColorScheme(config); const backgroundColor = colors.background; return Object.assign({}, config, { cursorColor: colors.palette.red, cursorShape: 'UNDERLINE', foregroundColor, backgroundColor, borderColor, css: `${config.css || ''} .tab_tab:before {border-left: 1px solid;} .tab_active {background: rgba(255,255,255,0.05);} .tab_active:before {border-color: ${colors.palette.red};} `, colors: colors.palette }); };
Add failing test for skin tones As per issue #3.
var tape = require('tape') var emojiStrip = require('..') var emoji tape('strip emoji', function (t) { emoji = 'thumbs-up👍 for staying strong💪 without emoji please🙏' t.equal(emojiStrip(emoji), 'thumbs-up for staying strong without emoji please') emoji = 'dealing with emoji😡 makes me feel like poop💩' t.equal(emojiStrip(emoji), 'dealing with emoji makes me feel like poop') t.end() }) tape('strip emoji with skin tones', function (t) { emoji = 'skin tones🙌🏾 should be oka👌🏿y' t.equal(emojiStrip(emoji), 'skin tones should be okay') emoji = 'regardless of which tone we should be 👍🏽good' t.equal(emojiStrip(emoji), 'regardless of which tone we should be good') emoji = 'or else spock🖖🏻 will punch👊🏼 us' t.equal(emojiStrip(emoji), 'or else spock will punch us') t.end() }) tape('don’t magically remove digits', function (t) { emoji = '9999 ways to die🔫 in the west' t.equal(emojiStrip(emoji), '9999 ways to die in the west') emoji = '⚡zero 0 🐗one 1 🐴two 2 🐋three 3 🎃four 4 🍌five 5 🍻six 6 💣seven 7 🍪eight 8 eight emoji ah ah ah' t.equal(emojiStrip(emoji), 'zero 0 one 1 two 2 three 3 four 4 five 5 six 6 seven 7 eight 8 eight emoji ah ah ah') t.end() })
var tape = require('tape') var emojiStrip = require('..') var emoji tape('strip emoji', function (t) { emoji = 'thumbs-up👍 for staying strong💪 without emoji please🙏' t.equal(emojiStrip(emoji), 'thumbs-up for staying strong without emoji please') emoji = 'dealing with emoji😡 makes me feel like poop💩' t.equal(emojiStrip(emoji), 'dealing with emoji makes me feel like poop') t.end() }) tape('don’t magically remove digits', function (t) { emoji = '9999 ways to die🔫 in the west' t.equal(emojiStrip(emoji), '9999 ways to die in the west') emoji = '⚡zero 0 🐗one 1 🐴two 2 🐋three 3 🎃four 4 🍌five 5 🍻six 6 💣seven 7 🍪eight 8 eight emoji ah ah ah' t.equal(emojiStrip(emoji), 'zero 0 one 1 two 2 three 3 four 4 five 5 six 6 seven 7 eight 8 eight emoji ah ah ah') t.end() })
Add a die function to quickly throw errors
var round = function(num) { return (num + 0.5) | 0 } var floor = function(num) { return num | 0 } var ceil = function(num) { return (num | 0) == num ? num | 0 : (num + 1) | 0 } var abs = Math.abs var sqrt = Math.sqrt var log = function(num) { var result = Math.log(num) return result } var signed_log = function(num) { var sign = num < 0 ? -1 : 1; var result = Math.log(abs(num)) return sign * result } var max = function(a, b) { return (a > b) ? a : b; } var min = function(a, b) { return (a < b) ? a : b; } var compact_array = function(old_all) { var new_all = [] for (var obj in old_all) { if (old_all[obj] != undefined) { new_all.push(old_all[obj]) } } return new_all; } var count_object_keys = function(obj) { var result = 0; for (var prop in obj) { if (obj.hasOwnProperty(prop)) result++ } return result } var warn = function() { console.log.apply(console, arguments) } var die = function(e) { throw new Error(e) }
var round = function(num) { return (num + 0.5) | 0 } var floor = function(num) { return num | 0 } var ceil = function(num) { return (num | 0) == num ? num | 0 : (num + 1) | 0 } var abs = Math.abs var sqrt = Math.sqrt var log = function(num) { var result = Math.log(num) return result } var signed_log = function(num) { var sign = num < 0 ? -1 : 1; var result = Math.log(abs(num)) return sign * result } var max = function(a, b) { return (a > b) ? a : b; } var min = function(a, b) { return (a < b) ? a : b; } var compact_array = function(old_all) { var new_all = [] for (var obj in old_all) { if (old_all[obj] != undefined) { new_all.push(old_all[obj]) } } return new_all; } var count_object_keys = function(obj) { var result = 0; for (var prop in obj) { if (obj.hasOwnProperty(prop)) result++ } return result } var warn = function() { console.log.apply(console, arguments) }
Remove author tags from javadoc
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package groovy.transform; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * The Generated annotation is used to mark members that have been generated. */ @Target({ElementType.METHOD, ElementType.CONSTRUCTOR, ElementType.TYPE, ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) public @interface Generated { }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package groovy.transform; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * The Generated annotation is used to mark members that have been generated. * * @author Andres Almiray * @author Jochen Theodorou * @author Mark Hoffmann */ @Target({ElementType.METHOD, ElementType.CONSTRUCTOR, ElementType.TYPE, ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) public @interface Generated { }
Allow --renovate-fork CLI flag for onboarding. Fixes https://github.com/renovateapp/renovate/issues/1371.
const { detectPackageFiles } = require('../../../../manager'); const { createOnboardingBranch } = require('./create'); const { rebaseOnboardingBranch } = require('./rebase'); const { isOnboarded, onboardingPrExists } = require('./check'); async function checkOnboardingBranch(config) { logger.debug('checkOnboarding()'); logger.trace({ config }); const repoIsOnboarded = await isOnboarded(config); if (repoIsOnboarded) { logger.debug('Repo is onboarded'); return { ...config, repoIsOnboarded }; } if (config.isFork && !config.renovateFork) { throw new Error('fork'); } logger.info('Repo is not onboarded'); if (await onboardingPrExists(config)) { logger.debug('Onboarding PR already exists'); await rebaseOnboardingBranch(config); } else { logger.debug('Onboarding PR does not exist'); if ((await detectPackageFiles(config)).length === 0) { throw new Error('no-package-files'); } logger.info('Need to create onboarding PR'); await createOnboardingBranch(config); } await platform.setBaseBranch(`renovate/configure`); const branchList = [`renovate/configure`]; return { ...config, repoIsOnboarded, branchList }; } module.exports = { checkOnboardingBranch, };
const { detectPackageFiles } = require('../../../../manager'); const { createOnboardingBranch } = require('./create'); const { rebaseOnboardingBranch } = require('./rebase'); const { isOnboarded, onboardingPrExists } = require('./check'); async function checkOnboardingBranch(config) { logger.debug('checkOnboarding()'); logger.trace({ config }); const repoIsOnboarded = await isOnboarded(config); if (repoIsOnboarded) { logger.debug('Repo is onboarded'); return { ...config, repoIsOnboarded }; } if (config.isFork) { throw new Error('fork'); } logger.info('Repo is not onboarded'); if (await onboardingPrExists(config)) { logger.debug('Onboarding PR already exists'); await rebaseOnboardingBranch(config); } else { logger.debug('Onboarding PR does not exist'); if ((await detectPackageFiles(config)).length === 0) { throw new Error('no-package-files'); } logger.info('Need to create onboarding PR'); await createOnboardingBranch(config); } await platform.setBaseBranch(`renovate/configure`); const branchList = [`renovate/configure`]; return { ...config, repoIsOnboarded, branchList }; } module.exports = { checkOnboardingBranch, };
Swap the palindrome spec for subset sum
"use strict"; var Sentient = require("../../lib/sentient"); describe("Exercise 4", function () { var machineCode = Sentient.compile({ instructions: [ // Add your instructions here. ] }); it("can solve the subset sum problem", function () { var result = Sentient.run(machineCode, { numbers: [5, 8, 4, 11, 6], total: 20 }); expect(result.members).toEqual([ true, false, true, true, false // 5, 4, 11 ]); }); it("can find a solution for 15", function () { var result = Sentient.run(machineCode, { numbers: [5, 8, 4, 11, 6], total: 15 }); expect(result.members).toEqual([ true, false, true, false, true // 5, 4, 6 ]); }); it("does not find a solution for 7", function () { var result = Sentient.run(machineCode, { numbers: [5, 8, 4, 11, 6], total: 7 }); expect(result).toEqual({}); }); });
"use strict"; var Sentient = require("../../lib/sentient"); describe("Exercise 4", function () { var machineCode = Sentient.compile({ instructions: [ // Add your instructions here. ] }); it("can find an array that is a palindrome of 3 numbers", function () { var result = Sentient.run(machineCode, {}); var palindrome = result.palindrome; var reverse = palindrome.slice(0).reverse(); expect(palindrome).toEqual(reverse); }); it("can find a solution that starts with a 5", function () { var result = Sentient.run(machineCode, { palindrome: { 0: 5 } }); var palindrome = result.palindrome; expect(palindrome[0]).toEqual(5); expect(palindrome[2]).toEqual(5); }); it("finds no solution that starts with 5 and ends with 3", function () { var result = Sentient.run(machineCode, { palindrome: { 0: 5, 2: 3 } }); expect(result).toEqual({}); }); it("only finds solutions that multiply to equal a given number", function () { var result = Sentient.run(machineCode, { total: 300 }); var palindrome = result.palindrome; expect(palindrome[0] * palindrome[1] * palindrome[2]).toEqual(300); result = Sentient.run(machineCode, { total: -120 }); palindrome = result.palindrome; expect(palindrome[0] * palindrome[1] * palindrome[2]).toEqual(-120); }); });
Use same syntax for all assets
var elixir = require('laravel-elixir'); /* |-------------------------------------------------------------------------- | Elixir Asset Management |-------------------------------------------------------------------------- | | Elixir provides a clean, fluent API for defining some basic Gulp tasks | for your Laravel application. By default, we are compiling the Sass | file for our application, as well as publishing vendor resources. | */ elixir(function(mix) { mix.sass('app.scss'); //mix.less(''); mix.copy( './public/js/libs/semantic/dist/themes', './public/css/themes' ); mix.styles([ './public/js/libs/semantic/dist/semantic.min.css' ], 'public/css/nestor.css'); // mix.scripts([ // './resources/assets/bower/jquery/dist/jquery.js', // './resources/assets/bower/bootstrap-sass-official/assets/javascripts/bootstrap.min.js' // ], // 'public/js/nestor.js'); });
var elixir = require('laravel-elixir'); /* |-------------------------------------------------------------------------- | Elixir Asset Management |-------------------------------------------------------------------------- | | Elixir provides a clean, fluent API for defining some basic Gulp tasks | for your Laravel application. By default, we are compiling the Sass | file for our application, as well as publishing vendor resources. | */ elixir(function(mix) { mix.sass('app.scss'); //mix.less(''); mix.copy( './public/js/libs/semantic/dist/themes', 'public/css/themes' ); mix.styles([ './public/js/libs/semantic/dist/semantic.min.css' ], 'public/css/nestor.css'); // mix.scripts([ // './resources/assets/bower/jquery/dist/jquery.js', // './resources/assets/bower/bootstrap-sass-official/assets/javascripts/bootstrap.min.js' // ], // 'public/js/nestor.js'); });
Fix bug on fudge wrapper
import fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) return super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException
import fudge import inspect class MethodNotAvailableInMockedObjectException(Exception): pass def getMock(class_to_mock): return FudgeWrapper(class_to_mock) class FudgeWrapper(fudge.Fake): def __init__(self, class_to_mock): self._class_to_mock = class_to_mock self._declared_calls = {} self._attributes = {} super(FudgeWrapper, self).__init__(self._class_to_mock.__name__) def provides(self, call_name): self._check_method_availability_on_mocked_object(call_name) super(FudgeWrapper, self).provides(call_name) def _check_method_availability_on_mocked_object(self, call_name): if call_name not in dir(self._class_to_mock): raise MethodNotAvailableInMockedObjectException
Add metrics to monitor Redis connections
package com.bendb.dropwizard.redis; import com.bendb.dropwizard.redis.jersey.JedisPoolBinder; import com.codahale.metrics.Gauge; import io.dropwizard.Configuration; import io.dropwizard.ConfiguredBundle; import io.dropwizard.setup.Bootstrap; import io.dropwizard.setup.Environment; import redis.clients.jedis.JedisPool; public abstract class JedisBundle<C extends Configuration> implements ConfiguredBundle<C>, JedisConfiguration<C> { private JedisPool pool; public JedisPool getPool() { return pool; } @Override public void initialize(Bootstrap<?> bootstrap) { } @Override public void run(C configuration, Environment environment) throws Exception { pool = getJedisFactory(configuration).build(environment); environment.metrics().register("redis.connection.idle", (Gauge) () -> pool.getNumIdle()); environment.metrics().register("redis.connection.active", (Gauge) () -> pool.getNumActive()); environment.metrics().register("redis.connection.wait", (Gauge) () -> pool.getNumWaiters()); environment.healthChecks().register("redis", new JedisHealthCheck(pool)); environment.jersey().register(new JedisPoolBinder(pool)); } }
package com.bendb.dropwizard.redis; import com.bendb.dropwizard.redis.jersey.JedisPoolBinder; import io.dropwizard.Configuration; import io.dropwizard.ConfiguredBundle; import io.dropwizard.setup.Bootstrap; import io.dropwizard.setup.Environment; import redis.clients.jedis.JedisPool; public abstract class JedisBundle<C extends Configuration> implements ConfiguredBundle<C>, JedisConfiguration<C> { private JedisPool pool; public JedisPool getPool() { return pool; } @Override public void initialize(Bootstrap<?> bootstrap) { } @Override public void run(C configuration, Environment environment) throws Exception { pool = getJedisFactory(configuration).build(environment); environment.healthChecks().register("redis", new JedisHealthCheck(pool)); environment.jersey().register(new JedisPoolBinder(pool)); } }
Make use of get_pkg_data_filename for icon
# Licensed under a 3-clause BSD style license - see LICENSE.rst """Defines constants used in `astropy.vo.samp`.""" import os from ...utils.data import get_pkg_data_filename __all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR', 'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE', 'SAFE_MTYPES', 'SAMP_ICON'] __profile_version__ = "1.3" #: General constant for samp.ok status string SAMP_STATUS_OK = "samp.ok" #: General constant for samp.warning status string SAMP_STATUS_WARNING = "samp.warning" #: General constant for samp.error status string SAMP_STATUS_ERROR = "samp.error" #: General constant to specify single instance Hub running mode SAMP_HUB_SINGLE_INSTANCE = "single" #: General constant to specify multiple instance Hub running mode SAMP_HUB_MULTIPLE_INSTANCE = "multiple" SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*", "coord.*", "spectrum.*", "bibcode.*", "voresource.*"] with open(get_pkg_data_filename('data/astropy_icon.png'), 'rb') as f: SAMP_ICON = f.read() try: import ssl except ImportError: SSL_SUPPORT = False else: SSL_SUPPORT = True del ssl
# Licensed under a 3-clause BSD style license - see LICENSE.rst """Defines constants used in `astropy.vo.samp`.""" import os DATA_DIR = os.path.join(os.path.dirname(__file__), 'data') __all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR', 'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE', 'SAFE_MTYPES', 'SAMP_ICON'] __profile_version__ = "1.3" #: General constant for samp.ok status string SAMP_STATUS_OK = "samp.ok" #: General constant for samp.warning status string SAMP_STATUS_WARNING = "samp.warning" #: General constant for samp.error status string SAMP_STATUS_ERROR = "samp.error" #: General constant to specify single instance Hub running mode SAMP_HUB_SINGLE_INSTANCE = "single" #: General constant to specify multiple instance Hub running mode SAMP_HUB_MULTIPLE_INSTANCE = "multiple" SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*", "coord.*", "spectrum.*", "bibcode.*", "voresource.*"] with open(os.path.join(DATA_DIR, 'astropy_icon.png'), 'rb') as f: SAMP_ICON = f.read() try: import ssl except ImportError: SSL_SUPPORT = False else: SSL_SUPPORT = True del ssl
Add documentation for public class and method
package com.janosgyerik.tools.util; import java.util.Arrays; /** * Utility class to work with matrices */ public class MatrixUtils { private MatrixUtils() { // utility class, forbidden constructor } /** * Format matrix as String, by joining Arrays.toString of each row * @param matrix the matrix to format * @return the matrix String */ public static String toString(int[][] matrix) { StringBuilder builder = new StringBuilder(); builder.append("["); if (matrix.length > 0) { builder.append(Arrays.toString(matrix[0])); for (int i = 1; i < matrix.length; ++i) { builder.append(", ").append(Arrays.toString(matrix[i])); } } builder.append("]"); return builder.toString(); } }
package com.janosgyerik.tools.util; import java.util.Arrays; public class MatrixUtils { private MatrixUtils() { // utility class, forbidden constructor } public static String toString(int[][] matrix) { StringBuilder builder = new StringBuilder(); builder.append("["); if (matrix.length > 0) { builder.append(Arrays.toString(matrix[0])); for (int i = 1; i < matrix.length; ++i) { builder.append(", ").append(Arrays.toString(matrix[i])); } } builder.append("]"); return builder.toString(); } }
Update the profile alignment example.
from alignment.sequence import Sequence from alignment.vocabulary import Vocabulary from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner from alignment.profile import Profile from alignment.profilealigner import SoftScoring, GlobalProfileAligner # Create sequences to be aligned. a = Sequence('what a beautiful day'.split()) b = Sequence('what a disappointingly bad day'.split()) print 'Sequence A:', a print 'Sequence B:', b print # Create a vocabulary and encode the sequences. v = Vocabulary() aEncoded = v.encodeSequence(a) bEncoded = v.encodeSequence(b) print 'Encoded A:', aEncoded print 'Encoded B:', bEncoded print # Create a scoring and align the sequences using global aligner. scoring = SimpleScoring(2, -1) aligner = GlobalSequenceAligner(scoring, -2) score, alignments = aligner.align(aEncoded, bEncoded, backtrace=True) # Create sequence profiles out of alignments. profiles = [Profile.fromSequenceAlignment(a) for a in alignments] for encoded in profiles: profile = v.decodeProfile(encoded) print profile print # Create a soft scoring and align the first profile against sequence A. scoring = SoftScoring(scoring) aligner = GlobalProfileAligner(scoring, -2) score, alignments = aligner.align(profiles[0], Profile.fromSequence(aEncoded), backtrace=True) for encoded in alignments: alignment = v.decodeProfileAlignment(encoded) print alignment
# Create sequences to be aligned. from alignment.sequence import Sequence a = Sequence("what a beautiful day".split()) b = Sequence("what a disappointingly bad day".split()) print "Sequence A:", a print "Sequence B:", b print # Create a vocabulary and encode the sequences. from alignment.vocabulary import Vocabulary v = Vocabulary() aEncoded = v.encodeSequence(a) bEncoded = v.encodeSequence(b) print "Encoded A:", aEncoded print "Encoded B:", bEncoded print # Create a scoring and align the sequences using global aligner. from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner scoring = SimpleScoring(2, -1) aligner = GlobalSequenceAligner(scoring, -2) score, alignments = aligner.align(aEncoded, bEncoded, backtrace=True) # Create sequence profiles out of alignments. from alignment.profile import Profile profiles = [Profile.fromSequenceAlignment(a) for a in alignments] for encoded in profiles: profile = v.decodeProfile(encoded) print profile print # Create a soft scoring and align the first profile against sequence A. from alignment.profilealigner import SoftScoring, GlobalProfileAligner scoring = SoftScoring(scoring) aligner = GlobalProfileAligner(scoring, -2) score, alignments = aligner.align(profiles[0], Profile.fromSequence(aEncoded), backtrace=True) for encoded in alignments: alignment = v.decodeProfileAlignment(encoded) print alignment
Fix string interpolation for displaying time periods The interpolation was done in a way that actually subtracted two strings from each other instead of putting the literal '-' between them.
import dayjs from './dayjs.js' import { i18n } from '@/plugins/i18n' // this imports i18-plugin from "frontend" or from "print", depending on where the helper is used function dateShort(dateTimeString) { return dayjs.utc(dateTimeString).format(i18n.tc('global.datetime.dateShort')) } function dateLong(dateTimeString) { return dayjs.utc(dateTimeString).format(i18n.tc('global.datetime.dateLong')) } function hourShort(dateTimeString) { return dayjs.utc(dateTimeString).format(i18n.tc('global.datetime.hourShort')) } // short format of dateTime range // doesn't repeat end date if on the same day function rangeShort(start, end) { let result = '' result += dateShort(start) result += ' ' result += hourShort(start) result += ' - ' if (dateShort(start) !== dateShort(end)) { result += dateShort(end) result += ' ' } result += hourShort(end) return result } // format of date range function dateRange(start, end) { if (dateLong(start) === dateLong(end)) { return dateLong(start) } return `${dateShort(start)} - ${dateLong(end)}` } export { dateShort, dateLong, hourShort, dateRange, rangeShort }
import dayjs from './dayjs.js' import { i18n } from '@/plugins/i18n' // this imports i18-plugin from "frontend" or from "print", depending on where the helper is used function dateShort(dateTimeString) { return dayjs.utc(dateTimeString).format(i18n.tc('global.datetime.dateShort')) } function dateLong(dateTimeString) { return dayjs.utc(dateTimeString).format(i18n.tc('global.datetime.dateLong')) } function hourShort(dateTimeString) { return dayjs.utc(dateTimeString).format(i18n.tc('global.datetime.hourShort')) } // short format of dateTime range // doesn't repeat end date if on the same day function rangeShort(start, end) { let result = '' result += dateShort(start) result += ' ' result += hourShort(start) result += ' - ' if (dateShort(start) !== dateShort(end)) { result += dateShort(end) result += ' ' } result += hourShort(end) return result } // format of date range function dateRange(start, end) { if (dateLong(start) === dateLong(end)) { return dateLong(start) } return `${dateShort(start) - dateLong()}` } export { dateShort, dateLong, hourShort, dateRange, rangeShort }
Add Trove classifiers for 2.x and 3.x
from distutils.core import setup def main(): setup( name = 'logging_levels', packages=['logging_levels'], package_dir = {'logging_levels':'logging_levels'}, version = open('VERSION.txt').read().strip(), author='Mike Thornton', author_email='six8@devdetails.com', url='https://github.com/six8/logging-levels', download_url='https://github.com/six8/logging-levels', keywords=['logging'], license='MIT', description="Add logging levels for when DEBUG just isn't enough.", classifiers = [ "Programming Language :: Python", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Topic :: Software Development :: Libraries :: Python Modules", ], long_description=open('README.rst').read(), ) if __name__ == '__main__': main()
from distutils.core import setup def main(): setup( name = 'logging_levels', packages=['logging_levels'], package_dir = {'logging_levels':'logging_levels'}, version = open('VERSION.txt').read().strip(), author='Mike Thornton', author_email='six8@devdetails.com', url='https://github.com/six8/logging-levels', download_url='https://github.com/six8/logging-levels', keywords=['logging'], license='MIT', description="Add logging levels for when DEBUG just isn't enough.", classifiers = [ "Programming Language :: Python", "Intended Audience :: Developers", "Topic :: Software Development :: Libraries :: Python Modules", ], long_description=open('README.rst').read(), ) if __name__ == '__main__': main()
Revert "Remove explicit link to homepage view in i3p_base" This reverts commit 3d4327f6d9d71c6b396b0655de81373210417aba.
#-- encoding: utf-8 -- from django.conf.urls.defaults import patterns, url from haystack.views import search_view_factory import views import ajax urlpatterns = patterns('', url(r'^$', views.homepage, name='i4p-index'), url(r'^homepage/ajax/slider/bestof/$', ajax.slider_bestof, name='i4p-homepage-ajax-slider-bestof'), url(r'^homepage/ajax/slider/latest/$', ajax.slider_latest, name='i4p-homepage-ajax-slider-latest'), url(r'^homepage/ajax/slider/commented/$', ajax.slider_most_commented, name='i4p-homepage-ajax-slider-commented'), url(r'^history/check_version/(?P<pk>[\d]+)$', views.VersionActivityCheckView.as_view(), name='history-check-version'), url(r'^search/', search_view_factory(view_class=views.SearchView), name='i4p-search'), url(r'^location/(?P<location_id>\d+)', views.LocationEditView.as_view(), name='i4p-location-edit'), url(r'^locations/$', views.LocationListView.as_view(), name='i4p-location-list'), url(r'^locations/missing/(?P<missing_field_name>\w+)$', views.LocationListView.as_view(), name='i4p-location-missing-list'), )
#-- encoding: utf-8 -- from django.conf.urls.defaults import patterns, url from haystack.views import search_view_factory import views import ajax urlpatterns = patterns('', #url(r'^$', views.homepage, name='i4p-index'), url(r'^homepage/ajax/slider/bestof/$', ajax.slider_bestof, name='i4p-homepage-ajax-slider-bestof'), url(r'^homepage/ajax/slider/latest/$', ajax.slider_latest, name='i4p-homepage-ajax-slider-latest'), url(r'^homepage/ajax/slider/commented/$', ajax.slider_most_commented, name='i4p-homepage-ajax-slider-commented'), url(r'^history/check_version/(?P<pk>[\d]+)$', views.VersionActivityCheckView.as_view(), name='history-check-version'), url(r'^search/', search_view_factory(view_class=views.SearchView), name='i4p-search'), url(r'^location/(?P<location_id>\d+)', views.LocationEditView.as_view(), name='i4p-location-edit'), url(r'^locations/$', views.LocationListView.as_view(), name='i4p-location-list'), url(r'^locations/missing/(?P<missing_field_name>\w+)$', views.LocationListView.as_view(), name='i4p-location-missing-list'), )
Check for FailureData if an error occurs Signed-off-by: Andres Garcia <a32ab6d0ea593a061705598410d773d0d3e9ce01@gmail.com>
<?php namespace Omnipay\Paysimple\Message; class Response implements \Omnipay\Common\Message\ResponseInterface { public function __construct($request, $response) { $this->request = $request; $this->response = $response; } public function getRequest() { $this->request; } public function isSuccessful() { $failureData = false; $message = $this->getMessage(); if(array_key_exists("FailureData", $message['Response']) && is_array($message['Response']['FailureData'])) { $failureData = true; } return ($this->response->getStatusCode() >= 200 && $this->response->getStatusCode() <= 299 && !$failureData); } public function isRedirect() { return false; } public function isCancelled() { return false; } public function getMessage() { return $this->response->json(); } public function getCode() { return $this->response->getStatusCode(); } public function getTransactionReference() { $json = $this->response->json(); return $json['Response']['Id']; } public function getData() { return $this->request->getData(); } }
<?php namespace Omnipay\Paysimple\Message; class Response implements \Omnipay\Common\Message\ResponseInterface { public function __construct($request, $response) { $this->request = $request; $this->response = $response; } public function getRequest() { $this->request; } public function isSuccessful() { return ($this->response->getStatusCode() >= 200 && $this->response->getStatusCode() <= 299); } public function isRedirect() { return false; } public function isCancelled() { return false; } public function getMessage() { return $this->response->json(); } public function getCode() { return $this->response->getStatusCode(); } public function getTransactionReference() { $json = $this->response->json(); return $json['Response']['Id']; } public function getData() { return $this->request->getData(); } }
Add update_tcbs.sql to the mix
"""bug 958558 migration for update_product_version() and friends Revision ID: 2c48009040da Revises: 48e9a4366530 Create Date: 2014-01-13 12:54:13.988864 """ # revision identifiers, used by Alembic. revision = '2c48009040da' down_revision = '4cacd567770f' from alembic import op from socorro.lib import citexttype, jsontype, buildtype from socorro.lib.migrations import fix_permissions, load_stored_proc import sqlalchemy as sa from sqlalchemy import types from sqlalchemy.dialects import postgresql from sqlalchemy.sql import table, column def upgrade(): load_stored_proc(op, ['update_product_versions.sql', 'is_rapid_beta.sql', 'sunset_date.sql', 'update_tcbs.sql' ]) def downgrade(): load_stored_proc(op, ['update_product_versions.sql', 'is_rapid_beta.sql', 'sunset_date.sql', 'update_tcbs.sql' ])
"""bug 958558 migration for update_product_version() and friends Revision ID: 2c48009040da Revises: 48e9a4366530 Create Date: 2014-01-13 12:54:13.988864 """ # revision identifiers, used by Alembic. revision = '2c48009040da' down_revision = '4cacd567770f' from alembic import op from socorro.lib import citexttype, jsontype, buildtype from socorro.lib.migrations import fix_permissions, load_stored_proc import sqlalchemy as sa from sqlalchemy import types from sqlalchemy.dialects import postgresql from sqlalchemy.sql import table, column def upgrade(): load_stored_proc(op, ['update_product_versions.sql', 'is_rapid_beta.sql', 'sunset_date.sql']) def downgrade(): load_stored_proc(op, ['update_product_versions.sql', 'is_rapid_beta.sql', 'sunset_date.sql'])
Remove after column for SQLite compatibility
<?php use Illuminate\Database\Schema\Blueprint; use Illuminate\Database\Migrations\Migration; class AlterUserTable extends Migration { /** * Run the migrations. */ public function up() { if (Schema::hasTable('users')) { if (!Schema::hasColumn('users', 'username')) { Schema::table('users', function (Blueprint $table) { $table->string('username'); $table->unique('username'); }); } } } /** * Reverse the migrations. */ public function down() { // } }
<?php use Illuminate\Database\Schema\Blueprint; use Illuminate\Database\Migrations\Migration; class AlterUserTable extends Migration { /** * Run the migrations. */ public function up() { if (Schema::hasTable('users')) { if (!Schema::hasColumn('users', 'username')) { Schema::table('users', function (Blueprint $table) { $table->string('username')->after('email'); $table->unique('username'); }); } } } /** * Reverse the migrations. */ public function down() { // } }
[MIG] Change the version of the module.
# -*- coding: utf-8 -*- # Copyright (C) 2009 Renato Lima - Akretion # License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html { 'name': 'Brazilian Localisation ZIP Codes', 'license': 'AGPL-3', 'author': 'Akretion, Odoo Community Association (OCA)', 'version': '9.0.1.0.0', 'depends': [ 'l10n_br_base', ], 'data': [ 'views/l10n_br_zip_view.xml', 'views/res_partner_view.xml', 'views/res_company_view.xml', 'views/res_bank_view.xml', 'wizard/l10n_br_zip_search_view.xml', 'security/ir.model.access.csv', ], 'test': [ 'test/zip_demo.yml' ], 'category': 'Localization', 'installable': True, }
# -*- coding: utf-8 -*- # Copyright (C) 2009 Renato Lima - Akretion # License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html { 'name': 'Brazilian Localisation ZIP Codes', 'license': 'AGPL-3', 'author': 'Akretion, Odoo Community Association (OCA)', 'version': '8.0.1.0.1', 'depends': [ 'l10n_br_base', ], 'data': [ 'views/l10n_br_zip_view.xml', 'views/res_partner_view.xml', 'views/res_company_view.xml', 'views/res_bank_view.xml', 'wizard/l10n_br_zip_search_view.xml', 'security/ir.model.access.csv', ], 'test': ['test/zip_demo.yml'], 'category': 'Localization', 'installable': False, }
Use init shortcut in embed config
var path = require('path'); var express = require('express'); var xAdmin = require('express-admin'); var dpath = path.resolve(__dirname, '../', process.argv[2], process.argv[3]); var args = { dpath: dpath, config: require(path.join(dpath, 'config.json')), settings: require(path.join(dpath, 'settings.json')), custom: require(path.join(dpath, 'custom.json')), users: require(path.join(dpath, 'users.json')) }; args.config.app.root = '/admin'; xAdmin.init(args, function (err, admin) { if (err) return console.log(err); // express 3.4.4 var app = express() .use('/admin', admin) .use(express.bodyParser()) .use(express.cookieParser()) .use(express.session({key: 'embed-app', secret: 'secret'})) .use(express.csrf()) .use(express.methodOverride()) .use(express.static(__dirname)); app.get('/', function (req, res) { res.send('Hello World'); }); app.listen(3000, function () { console.log('Express Admin listening on port 3000'); }); });
var path = require('path'); var express = require('express'); var expressAdmin = require('/home/mighty/node/modules/express-admin'); var dpath = path.resolve(__dirname, '../', process.argv[2], process.argv[3]); var expressAdminArgs = { dpath: dpath, config: require(path.join(dpath, 'config.json')), settings: require(path.join(dpath, 'settings.json')), custom: require(path.join(dpath, 'custom.json')), users: require(path.join(dpath, 'users.json')) }; expressAdminArgs.config.app.root = '/admin'; expressAdmin.initDatabase(expressAdminArgs, function (err) { if (err) return console.log(err); expressAdmin.initSettings(expressAdminArgs); var admin = expressAdmin.initServer(expressAdminArgs); // express 3.4.4 var app = express() .use('/admin', admin) .use(express.bodyParser()) .use(express.cookieParser()) .use(express.session({key: 'embed-app', secret: 'secret'})) .use(express.csrf()) .use(express.methodOverride()) .use(express.static(__dirname)); app.get('/', function (req, res) { res.send('Hello World'); }); app.listen(3000, function () { console.log('Express Admin listening on port 3000'); }); });
chore(styleguide): Add a note to the copy to clipboard example on styleguide
/*doc --- title: Copy To Clipboard name: copy_to_clipboard_react categories: - react_components_copy-to-clipboard - react_all --- <code class="pam"> <i class="fa fa-download" alt="Install the Component"></i> npm install pui-react-copy-to-clipboard --save </code> Require the subcomponents: ``` var CopyToClipboard = require('pui-react-copy-to-clipboard').CopyToClipboard; var CopyToClipboardButton = require('pui-react-copy-to-clipboard').CopyToClipboardButton; ``` ```react_example_table <CopyToClipboard text="I got copied by a button"><button>Click Me To Copy</button></CopyToClipboard> <CopyToClipboardButton text="I got copied by a good looking button"/> ``` The CopyToClipboard Components require the following property: Property | Type | Description ------------- | --------------| -------------------------------------------------------------------------- `text` | String | Text that is copied when the user clicks Below is a common example combining a readonly input and a copy button. Note that there is custom css on the styleguide to get the positioning right. ``` var Input = require('pui-react-inputs').Input ``` ```react_example_table <div className="copy-input"> <Input label="shareable link" value="bar.com/1234.jpg" readOnly /> <CopyToClipboardButton text="bar.com/1234.jpg"/> </div> ``` */
/*doc --- title: Copy To Clipboard name: copy_to_clipboard_react categories: - react_components_copy-to-clipboard - react_all --- <code class="pam"> <i class="fa fa-download" alt="Install the Component"></i> npm install pui-react-copy-to-clipboard --save </code> Require the subcomponents: ``` var CopyToClipboard = require('pui-react-copy-to-clipboard').CopyToClipboard; var CopyToClipboardButton = require('pui-react-copy-to-clipboard').CopyToClipboardButton; ``` ```react_example_table <CopyToClipboard text="I got copied by a button"><button>Click Me To Copy</button></CopyToClipboard> <CopyToClipboardButton text="I got copied by a good looking button"/> ``` The CopyToClipboard Components require the following property: Property | Type | Description ------------- | --------------| -------------------------------------------------------------------------- `text` | String | Text that is copied when the user clicks Below is a common example combining a readonly input and a copy button: ``` var Input = require('pui-react-inputs').Input ``` ```react_example_table <div className="copy-input"> <Input label="shareable link" value="bar.com/1234.jpg" readOnly /> <CopyToClipboardButton text="bar.com/1234.jpg"/> </div> ``` */
Unravel index for argmin of array
import doseresponse as dr import numpy as np import itertools as it import os import argparse import sys parser = argparse.ArgumentParser() requiredNamed = parser.add_argument_group('required arguments') requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True) if len(sys.argv)==1: parser.print_help() sys.exit(1) args = parser.parse_args() dr.setup(args.data_file) drugs_channels_idx = it.product(range(30), range(7)) BFs = np.zeros((30, 7)) for i, j in drugs_channels_idx: top_drug = dr.drugs[i] top_channel = dr.channels[j] drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1) bf_dir = "BFs/" bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel) BFs[i, j] = np.loadtxt(bf_file) max_idx = np.unravel_index(np.argmax(BFs), (30,7)) min_idx = np.unravel_index(np.argmin(BFs), (30,7)) print "max:", BFs[max_idx] print "min:", BFs[min_idx]
import doseresponse as dr import numpy as np import itertools as it import os import argparse import sys parser = argparse.ArgumentParser() requiredNamed = parser.add_argument_group('required arguments') requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True) if len(sys.argv)==1: parser.print_help() sys.exit(1) args = parser.parse_args() dr.setup(args.data_file) drugs_channels_idx = it.product(range(30), range(7)) BFs = np.zeros((30, 7)) for i, j in drugs_channels_idx: top_drug = dr.drugs[i] top_channel = dr.channels[j] drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1) bf_dir = "BFs/" bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel) BFs[i, j] = np.loadtxt(bf_file) max_idx = np.argmax(BFs) min_idx = np.argmin(BFs) print "max:", BFs[max_idx] print "min:", BFs[min_idx]
Save runlog as output is added.
from django.db import models from .bugwarriorconfig import BugwarriorConfig class BugwarriorConfigRunLog(models.Model): config = models.ForeignKey( BugwarriorConfig, related_name='run_logs', ) success = models.BooleanField(default=False) output = models.TextField() stack_trace = models.TextField() started = models.DateTimeField() finished = models.DateTimeField(null=True) def add_output(self, new): lines = [line for line in self.output.split('\n') if line] lines.append(new) self.output = '\n'.join(lines) self.save() def __unicode__(self): if self.success: category = 'Successful' else: category = 'Failed' return u"{category} bugwarrior-pull run of {config}".format( category=category, config=self.config ) class Meta: app_label = 'taskmanager'
from django.db import models from .bugwarriorconfig import BugwarriorConfig class BugwarriorConfigRunLog(models.Model): config = models.ForeignKey( BugwarriorConfig, related_name='run_logs', ) success = models.BooleanField(default=False) output = models.TextField() stack_trace = models.TextField() started = models.DateTimeField() finished = models.DateTimeField(null=True) def add_output(self, new): lines = [line for line in self.output.split('\n') if line] lines.append(new) self.output = '\n'.join(lines) def __unicode__(self): if self.success: category = 'Successful' else: category = 'Failed' return u"{category} bugwarrior-pull run of {config}".format( category=category, config=self.config ) class Meta: app_label = 'taskmanager'
Print the Jedi version when REPL completion is used This also makes debugging easier, because people see which completion they're actually using.
""" To use Jedi completion in Python interpreter, add the following in your shell setup (e.g., ``.bashrc``):: export PYTHONSTARTUP="$(python -m jedi repl)" Then you will be able to use Jedi completer in your Python interpreter:: $ python Python 2.7.2+ (default, Jul 20 2012, 22:15:08) [GCC 4.6.1] on linux2 Type "help", "copyright", "credits" or "license" for more information. >>> import os >>> os.path.join().split().in<TAB> # doctest: +SKIP os.path.join().split().index os.path.join().split().insert """ import jedi.utils from jedi import __version__ as __jedi_version__ print('REPL completion using Jedi %s' % __jedi_version__) jedi.utils.setup_readline() del jedi # Note: try not to do many things here, as it will contaminate global # namespace of the interpreter.
""" To use Jedi completion in Python interpreter, add the following in your shell setup (e.g., ``.bashrc``):: export PYTHONSTARTUP="$(python -m jedi repl)" Then you will be able to use Jedi completer in your Python interpreter:: $ python Python 2.7.2+ (default, Jul 20 2012, 22:15:08) [GCC 4.6.1] on linux2 Type "help", "copyright", "credits" or "license" for more information. >>> import os >>> os.path.join().split().in<TAB> # doctest: +SKIP os.path.join().split().index os.path.join().split().insert """ import jedi.utils jedi.utils.setup_readline() del jedi # Note: try not to do many things here, as it will contaminate global # namespace of the interpreter.
Add autoincrement for jump in the future
class BytecodeBase: autoincrement = True # For jump def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False
class BytecodeBase: def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False
Change webhook priority to allow store owners to use the event before us
<?php /** * Created by PhpStorm. * User: Zooma * Date: 21/09/15 * Time: 15:58 */ class Channel_Engine_Order_Complete { private $client; /** * Constructor */ public function __construct($client) { $this->client = $client; add_action('woocommerce_order_status_completed', array($this,'post_shipment_complete_status'), 5, 1); } public function post_shipment_complete_status($wc_order_id){ require_once(plugin_dir_path(__FILE__) . 'class-channel-engine-api.php'); $channel_engine_api_client = new Channel_Engine_API($this->client); $channel_engine_api_client->post_shipment_complete_status($wc_order_id); } }
<?php /** * Created by PhpStorm. * User: Zooma * Date: 21/09/15 * Time: 15:58 */ class Channel_Engine_Order_Complete { private $client; /** * Constructor */ public function __construct($client) { $this->client = $client; add_action('woocommerce_order_status_completed', array($this,'post_shipment_complete_status') , 0, 1 ); } public function post_shipment_complete_status($wc_order_id){ require_once(plugin_dir_path(__FILE__) . 'class-channel-engine-api.php'); $channel_engine_api_client = new Channel_Engine_API($this->client); $channel_engine_api_client->post_shipment_complete_status($wc_order_id); } }
Rename paths variable to fnames.
import os class FileIterator(object): """ This FileIterator iterates over the paths contained in one or more directories. """ def __init__(self, *paths): """ @param paths: the file or directory paths. """ self._paths = paths def __iter__(self): return self.next() def next(self): for f in self._paths: if os.path.isfile(f): yield f elif os.path.isdir(f): for root, dirs, fnames in os.walk(f): for fn in fnames: path = os.path.join(root, fn) yield path
import os class FileIterator(object): """ This FileIterator iterates over the paths contained in one or more directories. """ def __init__(self, *paths): """ @param paths: the file or directory paths. """ self._paths = paths def __iter__(self): return self.next() def next(self): for f in self._paths: if os.path.isfile(f): yield f elif os.path.isdir(f): for root, dirs, paths in os.walk(f): for f in paths: path = os.path.join(root, f) yield path
Fix tests for menu toggling
import { expect } from 'chai' import helpers from './helpers' import store from '../../src/store' import { TOGGLE_SIDEBAR, TOGGLE_USER_MENU } from '../../src/store/mutation-types' describe('main', () => { beforeEach(helpers.reset) afterEach(helpers.reset) describe('actions', () => { it('toggleSidebar', () => { helpers.runAction('toggleSidebar') expect(store._vm.isSidebarHidden).to.equal(false) }) it('toggleUserMenu', () => { helpers.runAction('toggleUserMenu') expect(store._vm.isUserMenuHidden).to.equal(false) }) }) describe('mutations', () => { it('TOGGLE_SIDEBAR', () => { store.commit(TOGGLE_SIDEBAR) expect(store._vm.isSidebarHidden).to.equal(false) store.commit(TOGGLE_SIDEBAR) expect(store._vm.isSidebarHidden).to.equal(true) }) it('TOGGLE_USER_MENU', () => { store.commit(TOGGLE_USER_MENU) expect(store._vm.isUserMenuHidden).to.equal(false) store.commit(TOGGLE_USER_MENU) expect(store._vm.isUserMenuHidden).to.equal(true) }) }) })
import { expect } from 'chai' import store from '../../src/store' import { TOGGLE_SIDEBAR, TOGGLE_USER_MENU } from '../../src/store/mutation-types' describe('main', () => { describe('actions', () => { it('toggleSidebar', () => { store._actions.toggleSidebar[0]({ commit: store.commit, state: store._vm.state }) expect(store._vm.isSidebarHidden).to.equal(false) }) it('toggleUserMenu', () => { store._actions.toggleUserMenu[0]({ commit: store.commit, state: store._vm.state }) expect(store._vm.isUserMenuHidden).to.equal(false) }) }) describe('mutations', () => { it('TOGGLE_SIDEBAR', () => { store.commit(TOGGLE_SIDEBAR) expect(store._vm.isSidebarHidden).to.equal(true) store.commit(TOGGLE_SIDEBAR) expect(store._vm.isSidebarHidden).to.equal(false) }) it('TOGGLE_USER_MENU', () => { store.commit(TOGGLE_USER_MENU) expect(store._vm.isUserMenuHidden).to.equal(true) store.commit(TOGGLE_USER_MENU) expect(store._vm.isUserMenuHidden).to.equal(false) }) }) })
Throw meaningful exception if properties file could not be found
import java.io.IOException; import java.io.InputStream; import java.util.Properties; import javax.xml.bind.JAXBException; import org.junit.Before; import org.junit.Test; import uk.bl.odin.orcid.client.OrcidAccessToken; import uk.bl.odin.orcid.client.OrcidOAuthClient; import uk.bl.odin.orcid.client.constants.OrcidAuthScope; import static org.junit.Assert.*; /** Most tests migrated to orcid-update-app as it requires server interaction. * * Developers should ensure that they copy testoauth.properties.example to testoauth.properties * and modify it to contain their credentials. * * @author tom * */ public class OrcidOAuthClientTest { private Properties properties = new Properties(); @Before public void before() throws IOException { final String filename = "testoauth.properties"; final InputStream inputStream = getClass().getResourceAsStream(filename); if (inputStream == null) { throw new IOException("Unable to find properties file src/test/resources/" + filename); } properties.load(inputStream); } @Test public final void testCreateProfile() throws IOException, JAXBException { OrcidOAuthClient client = new OrcidOAuthClient(properties.getProperty("orcidClientID"), properties.getProperty("orcidClientSecret"), properties.getProperty("orcidReturnUri"), Boolean.valueOf(properties.getProperty("orcidSandbox"))); OrcidAccessToken tok = client.getCreateProfileAccessToken(); assertNotNull(tok.getAccess_token()); } }
import java.io.IOException; import java.util.Properties; import javax.xml.bind.JAXBException; import org.junit.Before; import org.junit.Test; import uk.bl.odin.orcid.client.OrcidAccessToken; import uk.bl.odin.orcid.client.OrcidOAuthClient; import uk.bl.odin.orcid.client.constants.OrcidAuthScope; import static org.junit.Assert.*; /** Most tests migrated to orcid-update-app as it requires server interaction. * * Developers should ensure that they copy testoauth.properties.example to testoauth.properties * and modify it to contain their credentials. * * @author tom * */ public class OrcidOAuthClientTest { private Properties properties = new Properties(); @Before public void before() throws IOException { properties.load(getClass().getResourceAsStream("testoauth.properties")); } @Test public final void testCreateProfile() throws IOException, JAXBException { OrcidOAuthClient client = new OrcidOAuthClient(properties.getProperty("orcidClientID"), properties.getProperty("orcidClientSecret"), properties.getProperty("orcidReturnUri"), Boolean.valueOf(properties.getProperty("orcidSandbox"))); OrcidAccessToken tok = client.getCreateProfileAccessToken(); assertNotNull(tok.getAccess_token()); } }
Make streamable artist updates as they happen, rather than committing at the end of all artists
#!/usr/bin/env python import psycopg2 as ordbms import urllib, urllib2 import xml.etree.cElementTree as ElementTree class SetArtistStreamable: def __init__(self): self.conn = ordbms.connect ("dbname='librefm'") self.cursor = self.conn.cursor() def updateAll(self): """Sets artists streamable property if they have streamable tracks already in the database""" self.cursor.execute("SELECT DISTINCT(artist.name) FROM artist INNER JOIN track on artist.name=artist_name WHERE track.streamable = 1") for artist in self.cursor.fetchall(): name = artist[0] print "marking %s as streamable... " % name self.cursor.execute("UPDATE artist SET streamable = 1 WHERE name = %s", (name,)) self.conn.commit() if __name__ == '__main__': sas = SetArtistStreamable() sas.updateAll()
#!/usr/bin/env python import psycopg2 as ordbms import urllib, urllib2 import xml.etree.cElementTree as ElementTree class SetArtistStreamable: def __init__(self): self.conn = ordbms.connect ("dbname='librefm'") self.cursor = self.conn.cursor() def updateAll(self): """Sets artists streamable property if they have streamable tracks already in the database""" self.cursor.execute("SELECT DISTINCT(artist.name) FROM artist INNER JOIN track on artist.name=artist_name WHERE track.streamable = 1") for artist in self.cursor.fetchall(): name = artist[0] print "marking %s as streamable... " % name self.cursor.execute("UPDATE artist SET streamable = 1 WHERE name = %s", (name,)) print "Applying changes... ", self.conn.commit() print "done." if __name__ == '__main__': sas = SetArtistStreamable() sas.updateAll()
tests: Fix test running when Django isn't available.
# -*- coding: utf-8 -*- # Copyright (c) 2012-2013 Raphaël Barrois try: from django.db import models django_loaded = True except ImportError: django_loaded = False if django_loaded: from semantic_version import django_fields as semver_fields class VersionModel(models.Model): version = semver_fields.VersionField(verbose_name='my version') spec = semver_fields.SpecField(verbose_name='my spec') class PartialVersionModel(models.Model): partial = semver_fields.VersionField(partial=True, verbose_name='partial version') optional = semver_fields.VersionField(verbose_name='optional version', blank=True, null=True) optional_spec = semver_fields.SpecField(verbose_name='optional spec', blank=True, null=True) class CoerceVersionModel(models.Model): version = semver_fields.VersionField(verbose_name='my version', coerce=True) partial = semver_fields.VersionField(verbose_name='partial version', coerce=True, partial=True)
# -*- coding: utf-8 -*- # Copyright (c) 2012-2013 Raphaël Barrois from django.db import models from semantic_version import django_fields as semver_fields class VersionModel(models.Model): version = semver_fields.VersionField(verbose_name='my version') spec = semver_fields.SpecField(verbose_name='my spec') class PartialVersionModel(models.Model): partial = semver_fields.VersionField(partial=True, verbose_name='partial version') optional = semver_fields.VersionField(verbose_name='optional version', blank=True, null=True) optional_spec = semver_fields.SpecField(verbose_name='optional spec', blank=True, null=True) class CoerceVersionModel(models.Model): version = semver_fields.VersionField(verbose_name='my version', coerce=True) partial = semver_fields.VersionField(verbose_name='partial version', coerce=True, partial=True)
Use built-in Flask server when debugging - websockets won't work.
#!/usr/bin/env python from flask import Flask import views from util import generate_filename, massage_record, make_trace_folder def setup_routes(app): app.add_url_rule('/', 'index', views.visualization, methods=['GET']) app.add_url_rule('/visualization', 'visualization', views.visualization, methods=['GET']) app.add_url_rule('/records', 'add_record', views.add_record, methods=['POST']) app.add_url_rule('/records', 'show_records', views.show_records, methods=['GET']) def create_app(config=None): app = Flask(__name__) app.config.from_pyfile("settings.py") if config: app.config.update(config) setup_routes(app) make_trace_folder(app) return app app = create_app() if __name__ == '__main__': app = create_app() app.run("0.0.0.0")
#!/usr/bin/env python from gevent.pywsgi import WSGIServer from flask import Flask import views from handler import PatchedWebSocketHandler from util import generate_filename, massage_record, make_trace_folder def setup_routes(app): app.add_url_rule('/', 'index', views.visualization, methods=['GET']) app.add_url_rule('/visualization', 'visualization', views.visualization, methods=['GET']) app.add_url_rule('/records', 'add_record', views.add_record, methods=['POST']) app.add_url_rule('/records', 'show_records', views.show_records, methods=['GET']) def create_app(config=None): app = Flask(__name__) app.config.from_pyfile("settings.py") if config: app.config.update(config) setup_routes(app) make_trace_folder(app) return app app = create_app() if __name__ == '__main__': app = create_app() server = WSGIServer(('', 5000), app, handler_class=PatchedWebSocketHandler) server.serve_forever()
Implement __getattr__ to reduce code
class SpaceAge(object): YEARS = {"on_earth": 1, "on_mercury": 0.2408467, "on_venus": 0.61519726, "on_mars": 1.8808158, "on_jupiter": 11.862615, "on_saturn": 29.447498, "on_uranus": 84.016846, "on_neptune": 164.79132} def __init__(self, seconds): self.seconds = seconds @property def years(self): return self.seconds/31557600 def __getattr__(self, on_planet): if on_planet in SpaceAge.YEARS: return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2) else: raise AttributeError
class SpaceAge(object): def __init__(self, seconds): self.seconds = seconds @property def years(self): return self.seconds/31557600 def on_earth(self): return round(self.years, 2) def on_mercury(self): return round(self.years/0.2408467, 2) def on_venus(self): return round(self.years/0.6151976, 2) def on_mars(self): return round(self.years/1.8808158, 2) def on_jupiter(self): return round(self.years/11.862615, 2) def on_saturn(self): return round(self.years/29.447498, 2) def on_uranus(self): return round(self.years/84.016846, 2) def on_neptune(self): return round(self.years/164.79132, 2)
Reset list view after showing panel. Signed-off-by: Max Brunsfeld <78036c9b69b887700d5846f26a788d53b201ffbb@gmail.com>
'use babel'; import LineEndingListView from './line-ending-list-view'; let lineEndingTile = null; export function activate() { } export function consumeStatusBar(statusBar) { lineEndingTile = document.createElement('a'); lineEndingTile.className = "line-ending-tile"; atom.workspace.observeActivePaneItem((item) => { lineEndingTile.textContent = getLineEnding(item); }); let listView = new LineEndingListView((lineEnding) => { if (lineEnding) lineEndingTile.textContent = lineEnding; panel.hide(); }); let panel = atom.workspace.addModalPanel({ item: listView, visible: false }); lineEndingTile.addEventListener('click', () => { panel.show(); listView.reset(); }); statusBar.addRightTile({item: lineEndingTile, priority: 200}); } function getLineEnding(item) { if (!item) return ""; let hasLF = false; let hasCRLF = false; if (item && item.scan) { item.scan(/\n|\r\n/g, ({matchText}) => { if (matchText === "\n") hasLF = true; if (matchText === "\r\n") hasCRLF = true; }); } if (hasLF && hasCRLF) return "Mixed"; if (hasLF) return "LF"; if (hasCRLF) return "CRLF"; if (process.platform === 'win32') return "CRLF"; return "LF"; }
'use babel'; import LineEndingListView from './line-ending-list-view'; let lineEndingTile = null; export function activate() { atom.workspace.observeActivePaneItem((item) => { let ending = getLineEnding(item); if (lineEndingTile) lineEndingTile.textContent = ending; }); } export function consumeStatusBar(statusBar) { lineEndingTile = document.createElement('a'); lineEndingTile.className = "line-ending-tile"; let listView = new LineEndingListView((lineEnding) => { if (lineEnding) lineEndingTile.textContent = lineEnding; panel.hide(); }); let panel = atom.workspace.addModalPanel({ item: listView, visible: false }); lineEndingTile.addEventListener('click', () => { panel.show(); listView.reset(); }); statusBar.addRightTile({item: lineEndingTile, priority: 200}); } function getLineEnding(item) { if (!item) return ""; let hasLF = false; let hasCRLF = false; if (item && item.scan) { item.scan(/\n|\r\n/g, ({matchText}) => { if (matchText === "\n") hasLF = true; if (matchText === "\r\n") hasCRLF = true; }); } if (hasLF && hasCRLF) return "Mixed"; if (hasLF) return "LF"; if (hasCRLF) return "CRLF"; if (process.platform === 'win32') return "CRLF"; return "LF"; }
Upgrade tangled 0.1a7 => 0.1a9
from setuptools import setup setup( name='tangled.sqlalchemy', version='0.1a4.dev0', description='Tangled SQLAlchemy integration', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled.sqlalchemy/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.sqlalchemy', ], install_requires=[ 'tangled>=0.1a9', 'SQLAlchemy', ], extras_require={ 'dev': [ 'tangled[dev]>=0.1a9', ], }, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
from setuptools import setup setup( name='tangled.sqlalchemy', version='0.1a4.dev0', description='Tangled SQLAlchemy integration', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled.sqlalchemy/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.sqlalchemy', ], install_requires=[ 'tangled>=0.1a7', 'SQLAlchemy', ], extras_require={ 'dev': [ 'tangled[dev]>=0.1a7', ], }, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
Add query params to demo
import Ember from 'ember'; import burgerMenu from 'ember-burger-menu'; const { inject, computed } = Ember; export default Ember.Controller.extend({ queryParams: [ 'animation', 'itemAnimation', 'position', 'translucentOverlay', 'dismissOnClick', 'dismissOnEsc' ], application: inject.controller(), burgerMenu, animation: computed.alias('burgerMenu.animation'), itemAnimation: computed.alias('burgerMenu.itemAnimation'), position: computed.alias('burgerMenu.position'), translucentOverlay: computed.alias('application.translucentOverlay'), dismissOnClick: computed.alias('application.dismissOnClick'), dismissOnEsc: computed.alias('application.dismissOnEsc'), animations: [ 'slide', 'reveal', 'push', 'fall-down', 'open-door', 'push-rotate', 'rotate-out', 'scale-up', 'scale-down', 'scale-rotate', 'slide-reverse' ], itemAnimations: [ 'push', 'stack' ], actions: { setMenu() { this.get('burgerMenu').set(...arguments); } } });
import Ember from 'ember'; import burgerMenu from 'ember-burger-menu'; const { inject, computed } = Ember; export default Ember.Controller.extend({ application: inject.controller(), burgerMenu, translucentOverlay: computed.alias('application.translucentOverlay'), dismissOnClick: computed.alias('application.dismissOnClick'), dismissOnEsc: computed.alias('application.dismissOnEsc'), animations: [ 'slide', 'reveal', 'push', 'fall-down', 'open-door', 'push-rotate', 'rotate-out', 'scale-up', 'scale-down', 'scale-rotate', 'slide-reverse' ], itemAnimations: [ 'push', 'stack' ], actions: { setMenu() { this.get('burgerMenu').set(...arguments); } } });
Reset version number for maintenance branch.
"""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.x' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass
"""Spherical harmonic vector wind analysis.""" # Copyright (c) 2012-2013 Andrew Dawson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import absolute_import from . import standard from . import tools # List to define the behaviour of imports of the form: # from windspharm import * __all__ = [] # Package version number. __version__ = '1.2.0' try: from . import cdms __all__.append('cdms') metadata = cdms except ImportError: pass try: from . import iris __all__.append('iris') except ImportError: pass
Add debugging to find issue.
import traceback from flask import render_template, redirect, jsonify, session from flask_login import login_user from app.main import main from app.main.dao import users_dao, verify_codes_dao from app.main.forms import TwoFactorForm @main.route("/two-factor", methods=['GET']) def render_two_factor(): return render_template('views/two-factor.html', form=TwoFactorForm()) @main.route('/two-factor', methods=['POST']) def process_two_factor(): try: form = TwoFactorForm() if form.validate_on_submit(): user = users_dao.get_user_by_id(session['user_id']) verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms') login_user(user) return redirect('/dashboard') else: return jsonify(form.errors), 400 except: traceback.print_exc()
from flask import render_template, redirect, jsonify, session from flask_login import login_user from app.main import main from app.main.dao import users_dao, verify_codes_dao from app.main.forms import TwoFactorForm @main.route("/two-factor", methods=['GET']) def render_two_factor(): return render_template('views/two-factor.html', form=TwoFactorForm()) @main.route('/two-factor', methods=['POST']) def process_two_factor(): form = TwoFactorForm() if form.validate_on_submit(): user = users_dao.get_user_by_id(session['user_id']) verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms') login_user(user) return redirect('/dashboard') else: return jsonify(form.errors), 400
Fix the ASAN build by xfailing test_decimal when ASAN_OPTIONS is set. Adding decimal columns crashes an ASAN built impalad. This change skips the test. Change-Id: Ic94055a3f0d00f89354177de18bc27d2f4cecec2 Reviewed-on: http://gerrit.ent.cloudera.com:8080/2532 Reviewed-by: Ishaan Joshi <d1d1e60202ec9f2503deb1b724986485a125d802@cloudera.com> Tested-by: jenkins Reviewed-on: http://gerrit.ent.cloudera.com:8080/2594
#!/usr/bin/env python # Copyright (c) 2012 Cloudera, Inc. All rights reserved. # Targeted tests for decimal type. # import logging import pytest from copy import copy from tests.common.test_vector import * from tests.common.impala_test_suite import * class TestDecimalQueries(ImpalaTestSuite): BATCH_SIZES = [0, 1] @classmethod def get_workload(cls): return 'functional-query' @classmethod def add_test_dimensions(cls): super(TestDecimalQueries, cls).add_test_dimensions() cls.TestMatrix.add_dimension( TestDimension('batch_size', *TestDecimalQueries.BATCH_SIZES)) # On CDH4, hive does not support decimal so we can't run these tests against # the other file formats. Enable them on C5. cls.TestMatrix.add_constraint(lambda v:\ (v.get_value('table_format').file_format == 'text' and v.get_value('table_format').compression_codec == 'none') or v.get_value('table_format').file_format == 'parquet') def test_queries(self, vector): if os.environ.get('ASAN_OPTIONS') == 'handle_segv=0': pytest.xfail(reason="IMPALA-959: Sum on a decimal column fails ASAN") new_vector = copy(vector) new_vector.get_value('exec_option')['batch_size'] = vector.get_value('batch_size') self.run_test_case('QueryTest/decimal', new_vector)
#!/usr/bin/env python # Copyright (c) 2012 Cloudera, Inc. All rights reserved. # Targeted tests for decimal type. # import logging import pytest from copy import copy from tests.common.test_vector import * from tests.common.impala_test_suite import * class TestDecimalQueries(ImpalaTestSuite): BATCH_SIZES = [0, 1] @classmethod def get_workload(cls): return 'functional-query' @classmethod def add_test_dimensions(cls): super(TestDecimalQueries, cls).add_test_dimensions() cls.TestMatrix.add_dimension( TestDimension('batch_size', *TestDecimalQueries.BATCH_SIZES)) # On CDH4, hive does not support decimal so we can't run these tests against # the other file formats. Enable them on C5. cls.TestMatrix.add_constraint(lambda v:\ (v.get_value('table_format').file_format == 'text' and v.get_value('table_format').compression_codec == 'none') or v.get_value('table_format').file_format == 'parquet') def test_queries(self, vector): new_vector = copy(vector) new_vector.get_value('exec_option')['batch_size'] = vector.get_value('batch_size') self.run_test_case('QueryTest/decimal', new_vector)
Increase timeout in crash/worker test for travis
'use strict'; var path = require('path'); var expect = require('thehelp-test').expect; var supertest = require('supertest'); var util = require('./util'); describe('top-level crash in worker', function() { var agent, child; before(function(done) { agent = supertest.agent('http://localhost:3000'); child = util.startProcess(path.join(__dirname, '../../scenarios/crash_worker.js')); setTimeout(done, 1000); }); it('logs out top-level exception, calls last-ditch, graceful shutdown', function(done) { this.timeout(10000); child.on('close', function() { expect(child).to.have.property('result'); expect(child.result).to.match(/LastDitch: crash/); expect(child.result).to.match(/Worker #1 top-level domain error/); expect(child.result).to.match(/Worker #2 top-level domain error/); expect(child.result).to.match(/died after less than spin timeout/); expect(child.result).to.match(/No workers currently running!/); expect(child.result).to.match(/All workers gone./); done(); }); setTimeout(function() { child.kill(); }, 4000); }); });
'use strict'; var path = require('path'); var expect = require('thehelp-test').expect; var supertest = require('supertest'); var util = require('./util'); describe('top-level crash in worker', function() { var agent, child; before(function(done) { agent = supertest.agent('http://localhost:3000'); child = util.startProcess(path.join(__dirname, '../../scenarios/crash_worker.js')); setTimeout(done, 1000); }); it('logs out top-level exception, calls last-ditch, graceful shutdown', function(done) { this.timeout(10000); child.on('close', function() { expect(child).to.have.property('result'); expect(child.result).to.match(/LastDitch: crash/); expect(child.result).to.match(/Worker #1 top-level domain error/); expect(child.result).to.match(/Worker #2 top-level domain error/); expect(child.result).to.match(/died after less than spin timeout/); expect(child.result).to.match(/No workers currently running!/); expect(child.result).to.match(/All workers gone./); done(); }); setTimeout(function() { child.kill(); }, 2000); }); });
Use evt.coordinate instead of evt.getCoordinate()
goog.require('ol.Map'); goog.require('ol.RendererHint'); goog.require('ol.View2D'); goog.require('ol.layer.Tile'); goog.require('ol.source.TileWMS'); var wmsSource = new ol.source.TileWMS({ url: 'http://demo.opengeo.org/geoserver/wms', params: {'LAYERS': 'ne:ne'} }); var wmsLayer = new ol.layer.Tile({ source: wmsSource }); var view = new ol.View2D({ center: [0, 0], zoom: 1 }); var viewProjection = /** @type {ol.proj.Projection} */ (view.getProjection()); var map = new ol.Map({ layers: [wmsLayer], renderer: ol.RendererHint.CANVAS, target: 'map', view: view }); map.on('singleclick', function(evt) { document.getElementById('info').innerHTML = ''; var viewResolution = /** @type {number} */ (view.getResolution()); var url = wmsSource.getGetFeatureInfoUrl( evt.coordinate, viewResolution, viewProjection, {'INFO_FORMAT': 'text/html'}); if (url) { document.getElementById('info').innerHTML = '<iframe seamless src="' + url + '"></iframe>'; } });
goog.require('ol.Map'); goog.require('ol.RendererHint'); goog.require('ol.View2D'); goog.require('ol.layer.Tile'); goog.require('ol.source.TileWMS'); var wmsSource = new ol.source.TileWMS({ url: 'http://demo.opengeo.org/geoserver/wms', params: {'LAYERS': 'ne:ne'} }); var wmsLayer = new ol.layer.Tile({ source: wmsSource }); var view = new ol.View2D({ center: [0, 0], zoom: 1 }); var viewProjection = /** @type {ol.proj.Projection} */ (view.getProjection()); var map = new ol.Map({ layers: [wmsLayer], renderer: ol.RendererHint.CANVAS, target: 'map', view: view }); map.on('singleclick', function(evt) { document.getElementById('info').innerHTML = ''; var viewResolution = /** @type {number} */ (view.getResolution()); var url = wmsSource.getGetFeatureInfoUrl( evt.getCoordinate(), viewResolution, viewProjection, {'INFO_FORMAT': 'text/html'}); if (url) { document.getElementById('info').innerHTML = '<iframe seamless src="' + url + '"></iframe>'; } });
Use practicalmeteor:chai and not spacejamio:chai
Package.describe({ name: "practicalmeteor:google-translate", summary: "Server side synchronous google translate.", git: "https://github.com/practicalmeteor/meteor-google-translate.git", version: '0.9.0-rc0' }); Npm.depends({'googleapis': '1.0.21'}); Package.onUse(function (api) { api.versionsFrom('0.9.3'); api.use('coffeescript', 'server'); api.use(['practicalmeteor:loglevel@1.1.0_2', 'practicalmeteor:chai@1.9.2_3'], 'server'); // Uncomment once we upgrade to loglevel v2, with package specific loggers // api.addFiles('src/lib/log.js'); api.addFiles([ 'src/server/GoogleTranslate.coffee' ], "server"); //api.export('GoogleTranslate', 'server'); }); Package.onTest(function (api) { api.use([ 'coffeescript', 'practicalmeteor:google-translate', 'practicalmeteor:loglevel@1.1.0_2', 'practicalmeteor:chai', 'tinytest'], 'server'); api.addFiles('tests/server/GoogleTranslateTest.coffee', 'server'); });
Package.describe({ name: "practicalmeteor:google-translate", summary: "Server side synchronous google translate.", git: "https://github.com/practicalmeteor/meteor-google-translate.git", version: '0.9.0-rc0' }); Npm.depends({'googleapis': '1.0.21'}); Package.onUse(function (api) { api.versionsFrom('0.9.3'); api.use('coffeescript', 'server'); api.use(['practicalmeteor:loglevel@1.1.0_2', 'practicalmeteor:chai@1.9.2_3'], 'server'); // Uncomment once we upgrade to loglevel v2, with package specific loggers // api.addFiles('src/lib/log.js'); api.addFiles([ 'src/server/GoogleTranslate.coffee' ], "server"); //api.export('GoogleTranslate', 'server'); }); Package.onTest(function (api) { api.use([ 'coffeescript', 'practicalmeteor:google-translate', 'practicalmeteor:loglevel@1.1.0_2', 'spacejamio:chai', 'tinytest'], 'server'); api.addFiles('tests/server/GoogleTranslateTest.coffee', 'server'); });
Use relative url for loading documentation.json.
var Documentation = require('./model/Documentation'); var Component = require('../ui/Component'); var DocumentationReader = require('./DocumentationReader'); var $$ = Component.$$; var $ = require('../util/jquery'); var _ = require('../util/helpers'); var importDocumentation = require('./model/importDocumentation'); var _loadDocument = function(cb) { _.request('GET', './documentation.json', null, function(err, rawDoc) { if (err) { console.error(err); cb(err); } var doc = importDocumentation(rawDoc); window.doc = doc; // console.log('LE DOC', doc); cb(null, doc); }); }; $(function() { var doc = new Documentation(); window.doc = doc; _loadDocument(function(err, doc) { Component.mount($$(DocumentationReader, { doc: doc }), $('body')); }); });
var Documentation = require('./model/Documentation'); var Component = require('../ui/Component'); var DocumentationReader = require('./DocumentationReader'); var $$ = Component.$$; var $ = require('../util/jquery'); var _ = require('../util/helpers'); var importDocumentation = require('./model/importDocumentation'); var _loadDocument = function(cb) { _.request('GET', '/documentation.json', null, function(err, rawDoc) { if (err) { console.error(err); cb(err); } var doc = importDocumentation(rawDoc); window.doc = doc; // console.log('LE DOC', doc); cb(null, doc); }); }; $(function() { var doc = new Documentation(); window.doc = doc; _loadDocument(function(err, doc) { Component.mount($$(DocumentationReader, { doc: doc }), $('body')); }); });
Allow setting a flag for writable / non-writable folders
package retrobox.utils; import java.io.File; import xtvapps.core.Utils; public class MountPoint { File dir; String description; String filesystem = "unknown"; boolean isWritable = false; public MountPoint(String path) { this.dir = new File(path); } public MountPoint(File dir) { this.dir = dir; } public MountPoint() { } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public boolean isValid() { return dir!=null; } public long getFreeSpace() { return dir==null?0:dir.getFreeSpace(); } public long getTotalSpace() { return dir==null?0:dir.getTotalSpace(); } public File getDir() { return dir; } public String getFilesystem() { return filesystem; } public void setFilesystem(String filesystem) { this.filesystem = filesystem; } public String getFriendlyFreeSpace() { return Utils.size2humanDetailed(getFreeSpace()) + " free of " + Utils.size2humanDetailed(getTotalSpace()); } public boolean isWritable() { return isWritable; } public void setWritable(boolean isWritable) { this.isWritable = isWritable; } @Override public String toString() { return String.format("path:%s, desc:%s", dir!=null?dir.getAbsolutePath():"null", description); } }
package retrobox.utils; import java.io.File; import xtvapps.core.Utils; public class MountPoint { File dir; String description; String filesystem = "unknown"; public MountPoint(String path) { this.dir = new File(path); } public MountPoint(File dir) { this.dir = dir; } public MountPoint() { } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public boolean isValid() { return dir!=null; } public long getFreeSpace() { return dir==null?0:dir.getFreeSpace(); } public long getTotalSpace() { return dir==null?0:dir.getTotalSpace(); } public File getDir() { return dir; } public String getFilesystem() { return filesystem; } public void setFilesystem(String filesystem) { this.filesystem = filesystem; } public String getFriendlyFreeSpace() { return Utils.size2humanDetailed(getFreeSpace()) + " free of " + Utils.size2humanDetailed(getTotalSpace()); } @Override public String toString() { return String.format("path:%s, desc:%s", dir!=null?dir.getAbsolutePath():"null", description); } }
Increase integration cli test memory. Signed-off-by: Euan <82ce0a5f500076a0414f27984d8e19adc458729b@amazon.com>
// +build !windows package main import ( "github.com/docker/docker/pkg/integration/checker" "github.com/go-check/check" ) func (s *DockerSuite) TestInspectOomKilledTrue(c *check.C) { testRequires(c, DaemonIsLinux, memoryLimitSupport) name := "testoomkilled" _, exitCode, _ := dockerCmdWithError("run", "--name", name, "--memory", "32MB", "busybox", "sh", "-c", "x=a; while true; do x=$x$x$x$x; done") c.Assert(exitCode, checker.Equals, 137, check.Commentf("OOM exit should be 137")) oomKilled, err := inspectField(name, "State.OOMKilled") c.Assert(oomKilled, checker.Equals, "true") c.Assert(err, checker.IsNil) } func (s *DockerSuite) TestInspectOomKilledFalse(c *check.C) { testRequires(c, DaemonIsLinux, memoryLimitSupport) name := "testoomkilled" dockerCmd(c, "run", "--name", name, "--memory", "32MB", "busybox", "sh", "-c", "echo hello world") oomKilled, err := inspectField(name, "State.OOMKilled") c.Assert(oomKilled, checker.Equals, "false") c.Assert(err, checker.IsNil) }
// +build !windows package main import ( "github.com/docker/docker/pkg/integration/checker" "github.com/go-check/check" ) func (s *DockerSuite) TestInspectOomKilledTrue(c *check.C) { testRequires(c, DaemonIsLinux, memoryLimitSupport) name := "testoomkilled" _, exitCode, _ := dockerCmdWithError("run", "--name", name, "-m", "10MB", "busybox", "sh", "-c", "x=a; while true; do x=$x$x$x$x; done") c.Assert(exitCode, checker.Equals, 137, check.Commentf("OOM exit should be 137")) oomKilled, err := inspectField(name, "State.OOMKilled") c.Assert(oomKilled, checker.Equals, "true") c.Assert(err, checker.IsNil) } func (s *DockerSuite) TestInspectOomKilledFalse(c *check.C) { testRequires(c, DaemonIsLinux, memoryLimitSupport) name := "testoomkilled" dockerCmd(c, "run", "--name", name, "-m", "10MB", "busybox", "sh", "-c", "echo hello world") oomKilled, err := inspectField(name, "State.OOMKilled") c.Assert(oomKilled, checker.Equals, "false") c.Assert(err, checker.IsNil) }
Add API versioning for House endpoints.
'use strict' var passport = require('passport') module.exports = function(app) { var house = require('../controllers/houseController') var versioning = require('../config/versioning') app.route(versioning.url + '/houses') .get(house.list_all_houses) .post(passport.authenticate('jwt', { session: false }), function(req, res) { var token = getToken(req.headers) if (token) { // User from token is at req.user house.create_a_house(req, res) } else { return res.status(403).send({ success: false, message: 'Unauthorized.' }) } }) app.route(versioning.url + '/houses/:houseId') .get(house.read_a_house) .put(house.update_a_house) .delete(house.delete_a_house) } // JWT approach of getting token from request headers const getToken = (headers) => { if (headers && headers.authorization) { var parted = headers.authorization.split(' ') if (parted.length === 2) { return parted[1] } else { return null } } else { return null } }
'use strict' var passport = require('passport') module.exports = function(app) { var house = require('../controllers/houseController') app.route('/houses') .get(house.list_all_houses) .post(passport.authenticate('jwt', { session: false }), function(req, res) { var token = getToken(req.headers) if (token) { console.log("Creates a house: " + req.user) house.create_a_house(req, res) } else { return res.status(403).send({ success: false, message: 'Unauthorized.' }) } }) app.route('/houses/:houseId') .get(house.read_a_house) .put(house.update_a_house) .delete(house.delete_a_house) } const getToken = (headers) => { if (headers && headers.authorization) { var parted = headers.authorization.split(' ') if (parted.length === 2) { return parted[1] } else { return null } } else { return null } }
Change popup content to user input values
/** @module ember-flexberry-gis */ import Ember from 'ember'; import BaseMapCommand from './base'; /** Go to map-command. Moves map to a given geographic point. @class GoToMapCommand @extends BaseMapCommand */ export default BaseMapCommand.extend({ /** Executes map-command. @method execute */ _execute(options) { this._super(...arguments); let point = Ember.get(options, 'point'); let crs = Ember.get(options, 'crs'); let xCaption = Ember.get(options, 'xCaption'); let yCaption = Ember.get(options, 'yCaption'); let latlng = null; latlng = crs.unproject(point); let leafletMap = this.get('leafletMap'); leafletMap.panTo(latlng); let popupContent = `${xCaption}: ${point.x}; ${yCaption}: ${point.y}`; leafletMap.openPopup(popupContent, latlng); } });
/** @module ember-flexberry-gis */ import Ember from 'ember'; import BaseMapCommand from './base'; /** Go to map-command. Moves map to a given geographic point. @class GoToMapCommand @extends BaseMapCommand */ export default BaseMapCommand.extend({ /** Executes map-command. @method execute */ _execute(options) { this._super(...arguments); let point = Ember.get(options, 'point'); let crs = Ember.get(options, 'crs'); let xCaption = Ember.get(options, 'xCaption'); let yCaption = Ember.get(options, 'yCaption'); let latlng = null; latlng = crs.unproject(point); let leafletMap = this.get('leafletMap'); leafletMap.panTo(latlng); let popupContent = `${xCaption}: ${latlng.lng}; ${yCaption}: ${latlng.lat}`; leafletMap.openPopup(popupContent, latlng); } });
Fix dependencies for Django 1.7. Older versions of django-mptt will generate warnings
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='blanc-basic-pages', version='0.2.1', description='Blanc Basic Pages for Django', long_description=open('README.rst').read(), url='https://github.com/blancltd/blanc-basic-pages', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], install_requires=[ 'django-mptt>=0.6.1', 'django-mptt-admin>=0.1.8', ], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='blanc-basic-pages', version='0.2.1', description='Blanc Basic Pages for Django', long_description=open('README.rst').read(), url='https://github.com/blancltd/blanc-basic-pages', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], install_requires=[ 'django-mptt>=0.6.0', 'django-mptt-admin==0.1.8', ], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', )
Update script to store the basename instead of the complete path
#!/usr/bin/env python import click import os import codecs import json import pandas as pd @click.command() @click.argument('input_files', nargs=-1, type=click.Path(exists=True)) @click.argument('output_file', nargs=1, type=click.Path()) def nerstats(input_files, output_file): output_dir = os.path.dirname(output_file) if not os.path.exists(output_dir): os.makedirs(output_dir) frames = [] for fi in input_files: with codecs.open(fi, encoding='utf-8') as f: saf = json.load(f) data = {} data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()] data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()] data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()] data['text'] = [os.path.basename(fi) for t in saf['tokens'] if 'ne' in t.keys()] frames.append(pd.DataFrame(data=data)) df = pd.concat(frames, ignore_index=True) df.to_csv(output_file) if __name__ == '__main__': nerstats()
#!/usr/bin/env python import click import os import codecs import json import pandas as pd @click.command() @click.argument('input_files', nargs=-1, type=click.Path(exists=True)) @click.argument('output_file', nargs=1, type=click.Path()) def nerstats(input_files, output_file): output_dir = os.path.dirname(output_file) if not os.path.exists(output_dir): os.makedirs(output_dir) frames = [] for fi in input_files: with codecs.open(fi, encoding='utf-8') as f: saf = json.load(f) data = {} data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()] data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()] data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()] data['text'] = [fi for t in saf['tokens'] if 'ne' in t.keys()] frames.append(pd.DataFrame(data=data)) df = pd.concat(frames, ignore_index=True) df.to_csv(output_file) if __name__ == '__main__': nerstats()
Move ID & HASH & CARD in CardDetail classes from Controller to Model
// ---------------------------------------------------------------- // CardDetail Class // ---------------------------------------------------------------- // Model class CardDetailModel extends CommonModel { constructor({ name } = {}) { super({ name: name }); this.NAME = 'Card Detail Model'; this.EVENT = PS.CDE; this.ID = null; this.HASH = null; this.CARD = null; } } // ---------------------------------------------------------------- // View class CardDetailView extends CommonView { constructor(_model = new CardDetailModel()) { super(_model); this.NAME = 'Card Detail View'; } } // ---------------------------------------------------------------- // Controller class CardDetailController extends CommonController { constructor(_obj) { super(_obj); this.model = new CardDetailModel(_obj); this.view = new CardDetailView(this.model); this.NAME = 'Card Detail Controller'; } } // ---------------------------------------------------------------- // Event class CardDetailEvent extends CommonEvent { constructor({ name = 'Card Detail Event' } = {}) { super({ name: name }); PS.CDE = this; this.NAME = name; this.CONTROLLER = new CardDetailController({ name: 'Card Detail Controller', }); } }
// ---------------------------------------------------------------- // CardDetail Class // ---------------------------------------------------------------- // Model class CardDetailModel extends CommonModel { constructor({ name } = {}) { super({ name: name }); this.NAME = 'Card Detail Model'; this.EVENT = PS.CDE; } } // ---------------------------------------------------------------- // View class CardDetailView extends CommonView { constructor(_model = new CardDetailModel()) { super(_model); this.NAME = 'Card Detail View'; } } // ---------------------------------------------------------------- // Controller class CardDetailController extends CommonController { constructor(_obj) { super(_obj); this.model = new CardDetailModel(_obj); this.view = new CardDetailView(this.model); this.NAME = 'Card Detail Controller'; this.model.ID = null; this.model.HASH = null; this.model.CARD = null; } } // ---------------------------------------------------------------- // Event class CardDetailEvent extends CommonEvent { constructor({ name = 'Card Detail Event' } = {}) { super({ name: name }); PS.CDE = this; this.NAME = name; this.CONTROLLER = new CardDetailController({ name: 'Card Detail Controller', }); } }
Use clone method for Stripes objects
import React from 'react'; import Route from 'react-router-dom/Route'; import { connectFor } from '@folio/stripes-connect'; import { modules } from 'stripes-loader'; // eslint-disable-line import AddContext from './AddContext'; if (!Array.isArray(modules.app) && modules.length < 0) { throw new Error('At least one module of type "app" must be enabled.'); } function getModuleRoutes(stripes) { return modules.app.map((module) => { const name = module.module.replace(/^@folio\//, ''); const perm = `module.${name}.enabled`; if (!stripes.hasPerm(perm)) return null; const connect = connectFor(module.module, stripes.epics, stripes.logger); const Current = connect(module.getModule()); const moduleStripes = stripes.clone({ connect }); return ( <Route path={module.route} key={module.route} render={props => <AddContext context={{ stripes: moduleStripes }}> <Current {...props} connect={connect} stripes={moduleStripes} /> </AddContext> } /> ); }).filter(x => x); } export default getModuleRoutes;
import React from 'react'; import Route from 'react-router-dom/Route'; import { connectFor } from '@folio/stripes-connect'; import { modules } from 'stripes-loader'; // eslint-disable-line import AddContext from './AddContext'; if (!Array.isArray(modules.app) && modules.length < 0) { throw new Error('At least one module of type "app" must be enabled.'); } function getModuleRoutes(stripes) { return modules.app.map((module) => { const name = module.module.replace(/^@folio\//, ''); const perm = `module.${name}.enabled`; if (!stripes.hasPerm(perm)) return null; const connect = connectFor(module.module, stripes.epics, stripes.logger); const Current = connect(module.getModule()); const moduleStripes = Object.assign({}, stripes, { connect }); return ( <Route path={module.route} key={module.route} render={props => <AddContext context={{ stripes: moduleStripes }}> <Current {...props} connect={connect} stripes={moduleStripes} /> </AddContext> } /> ); }).filter(x => x); } export default getModuleRoutes;
Add check for valid location
<?php namespace PoGoPHP\Location; use GuzzleHttp\Exception\GuzzleException; use PoGoPHP\Http\HttpClientAwareInterface; use PoGoPHP\Http\HttpClientAwareTrait; class LocationSearcher implements HttpClientAwareInterface { use HttpClientAwareTrait; public static $maps_url = 'https://maps.google.com/maps/api/geocode/json'; /** * @param string $location * @return Location * @throws LocationException */ public function search($location) { try { $response = $this->httpClient->request('GET', (static::$maps_url . '?' . http_build_query(['address' => $location]))); } catch (GuzzleException $e) { throw new LocationException("Error while trying to get location: {$e->getMessage()}", $e->getCode()); } $data = json_decode($response->getBody()); if ($data === null) { throw new LocationException('Received invalid or null data from Google Maps'); } if (empty($data->results)) { throw new LocationException("Location '{$location}' not found"); } return new Location( $data->results[0]->geometry->location->lat, $data->results[0]->geometry->location->lng ); } }
<?php namespace PoGoPHP\Location; use GuzzleHttp\Exception\GuzzleException; use PoGoPHP\Http\HttpClientAwareInterface; use PoGoPHP\Http\HttpClientAwareTrait; class LocationSearcher implements HttpClientAwareInterface { use HttpClientAwareTrait; public static $maps_url = 'https://maps.google.com/maps/api/geocode/json'; /** * @param string $location * @return Location * @throws LocationException */ public function search($location) { try { $response = $this->httpClient->request('GET', (static::$maps_url . '?' . http_build_query(['address' => $location]))); } catch (GuzzleException $e) { throw new LocationException("Error while trying to get location: {$e->getMessage()}", $e->getCode()); } $data = json_decode($response->getBody()); if ($data === null) { throw new LocationException('Received invalid or null data from Google Maps'); } return new Location( $data->results[0]->geometry->location->lat, $data->results[0]->geometry->location->lng ); } }
Remove a needless static reference. The static method has been imported, no need to explicitly name the class.
package io.dropwizard.logging; import ch.qos.logback.classic.pattern.RootCauseFirstThrowableProxyConverter; import ch.qos.logback.classic.spi.IThrowableProxy; import java.util.regex.Pattern; import static io.dropwizard.logging.PrefixedThrowableProxyConverter.*; /** * A {@link RootCauseFirstThrowableProxyConverter} that prefixes stack traces with {@code !}. */ public class PrefixedRootCauseFirstThrowableProxyConverter extends RootCauseFirstThrowableProxyConverter { private static final String CAUSING = PREFIX + "Causing:"; private static final Pattern CAUSING_PATTERN = Pattern.compile("^" + Pattern.quote(PREFIX) + "Wrapped by:", Pattern.MULTILINE); @Override protected String throwableProxyToString(IThrowableProxy tp) { final String prefixed = PATTERN.matcher(super.throwableProxyToString(tp)).replaceAll(PREFIX); return CAUSING_PATTERN.matcher(prefixed).replaceAll(CAUSING); } }
package io.dropwizard.logging; import ch.qos.logback.classic.pattern.RootCauseFirstThrowableProxyConverter; import ch.qos.logback.classic.spi.IThrowableProxy; import java.util.regex.Pattern; import static io.dropwizard.logging.PrefixedThrowableProxyConverter.*; /** * A {@link RootCauseFirstThrowableProxyConverter} that prefixes stack traces with {@code !}. */ public class PrefixedRootCauseFirstThrowableProxyConverter extends RootCauseFirstThrowableProxyConverter { private static final String CAUSING = PREFIX + "Causing:"; private static final Pattern CAUSING_PATTERN = Pattern.compile( "^" + Pattern.quote(PrefixedThrowableProxyConverter.PREFIX) + "Wrapped by:", Pattern.MULTILINE); @Override protected String throwableProxyToString(IThrowableProxy tp) { final String prefixed = PATTERN.matcher(super.throwableProxyToString(tp)).replaceAll(PREFIX); return CAUSING_PATTERN.matcher(prefixed).replaceAll(CAUSING); } }
Replace custom author tags with @author Apache Software Foundation. git-svn-id: 774b6be6af7f353471b728afb213ebe1be89b277@130378 13f79535-47bb-0310-9956-ffa450edef68
/* * Copyright 2001-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.codec; /** * Defines common decoding methods for byte array decoders. * * @author Apache Software Foundation * @version $Id: BinaryDecoder.java,v 1.10 2004/06/15 18:14:15 ggregory Exp $ */ public interface BinaryDecoder extends Decoder { /** * Decodes a byte array and returns the results as a byte array. * * @param pArray A byte array which has been encoded with the * appropriate encoder * * @return a byte array that contains decoded content * * @throws DecoderException A decoder exception is thrown * if a Decoder encounters a failure condition during * the decode process. */ byte[] decode(byte[] pArray) throws DecoderException; }
/* * Copyright 2001-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.codec; /** * Defines common decoding methods for byte array decoders. * * @author Tim O'Brien * @author Gary Gregory * @version $Id: BinaryDecoder.java,v 1.9 2004/02/23 07:32:49 ggregory Exp $ */ public interface BinaryDecoder extends Decoder { /** * Decodes a byte array and returns the results as a byte array. * * @param pArray A byte array which has been encoded with the * appropriate encoder * * @return a byte array that contains decoded content * * @throws DecoderException A decoder exception is thrown * if a Decoder encounters a failure condition during * the decode process. */ byte[] decode(byte[] pArray) throws DecoderException; }
Make provider-carousel test account for whitespace
import {moduleForComponent, test} from 'ember-qunit'; import hbs from 'htmlbars-inline-precompile'; import Ember from 'ember'; moduleForComponent('provider-carousel', 'Integration | Component | provider carousel', { integration: true }); test('it renders', function (assert) { // Set any properties with this.set('myProperty', 'value'); // Handle any actions with this.on('myAction', function(val) { ... }); this.set('provider1', Ember.Object.create({id: 'asu'})); this.set('provider2', Ember.Object.create({id: 'psyarxiv'})); this.set('provider3', Ember.Object.create({id: 'socarxiv'})); this.set('provider4', Ember.Object.create({id: 'engrxiv'})); this.set('provider5', Ember.Object.create({id: 'osf'})); this.set('provider6', Ember.Object.create({id: 'testprovider'})); this.set('provider7', Ember.Object.create({id: 'testprovider2'})); this.set('providers', [this.get('provider1'), this.get('provider2'), this.get('provider3'), this.get('provider4'), this.get('provider5'), this.get('provider6'), this.get('provider7')]); this.render(hbs`{{provider-carousel providers=providers }}`); assert.ok(/^\s*Previous\s*Next\s*$/.test(this.$().context.innerText)); });
import {moduleForComponent, skip} from 'ember-qunit'; import hbs from 'htmlbars-inline-precompile'; import Ember from 'ember'; moduleForComponent('provider-carousel', 'Integration | Component | provider carousel', { integration: true }); skip('it renders', function (assert) { // Set any properties with this.set('myProperty', 'value'); // Handle any actions with this.on('myAction', function(val) { ... }); this.set('provider1', Ember.Object.create({id: 'asu'})); this.set('provider2', Ember.Object.create({id: 'psyarxiv'})); this.set('provider3', Ember.Object.create({id: 'socarxiv'})); this.set('provider4', Ember.Object.create({id: 'engrxiv'})); this.set('provider5', Ember.Object.create({id: 'osf'})); this.set('provider6', Ember.Object.create({id: 'testprovider'})); this.set('provider7', Ember.Object.create({id: 'testprovider2'})); this.set('providers', [this.get('provider1'), this.get('provider2'), this.get('provider3'), this.get('provider4'), this.get('provider5'), this.get('provider6'), this.get('provider7')]); this.render(hbs`{{provider-carousel providers=providers }}`); assert.equal(this.$().context.innerText, 'PreviousNext'); });
Fix the test; solution found by Christian Heimes. Thanks!
#! -*- coding: koi8-r -*- # This file is marked as binary in the CVS, to prevent MacCVS from recoding it. import unittest from test import test_support class PEP263Test(unittest.TestCase): def test_pep263(self): self.assertEqual( "Питон".encode("utf-8"), b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd' ) self.assertEqual( "\П".encode("utf-8"), b'\\\xd0\x9f' ) def test_main(): test_support.run_unittest(PEP263Test) if __name__=="__main__": test_main()
#! -*- coding: koi8-r -*- # This file is marked as binary in the CVS, to prevent MacCVS from recoding it. import unittest from test import test_support class PEP263Test(unittest.TestCase): def test_pep263(self): self.assertEqual( "Питон".encode("utf-8"), '\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd' ) self.assertEqual( "\П".encode("utf-8"), '\\\xd0\x9f' ) def test_main(): test_support.run_unittest(PEP263Test) if __name__=="__main__": test_main()
Initialize osprofiler in WSGI application This patch adds the missing initialization of OSProfiler when the Cinder API is running as a WSGI application. Change-Id: Ifaffa2d58eeadf5d47fafbdde5538d26bcd346a6
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Cinder OS API WSGI application.""" import sys import warnings from cinder import objects warnings.simplefilter('once', DeprecationWarning) from oslo_config import cfg from oslo_log import log as logging from oslo_service import wsgi from cinder import i18n i18n.enable_lazy() # Need to register global_opts from cinder.common import config from cinder.common import constants from cinder import rpc from cinder import service from cinder import version CONF = cfg.CONF def initialize_application(): objects.register_all() CONF(sys.argv[1:], project='cinder', version=version.version_string()) logging.setup(CONF, "cinder") config.set_middleware_defaults() rpc.init(CONF) service.setup_profiler(constants.API_BINARY, CONF.host) return wsgi.Loader(CONF).load_app(name='osapi_volume')
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Cinder OS API WSGI application.""" import sys import warnings from cinder import objects warnings.simplefilter('once', DeprecationWarning) from oslo_config import cfg from oslo_log import log as logging from oslo_service import wsgi from cinder import i18n i18n.enable_lazy() # Need to register global_opts from cinder.common import config from cinder import rpc from cinder import version CONF = cfg.CONF def initialize_application(): objects.register_all() CONF(sys.argv[1:], project='cinder', version=version.version_string()) logging.setup(CONF, "cinder") config.set_middleware_defaults() rpc.init(CONF) return wsgi.Loader(CONF).load_app(name='osapi_volume')
Add pointerLock() custom element support. Increase readability.
import THREE from 'three'; export default function pointerLock( controls, element = document.body ) { const hasPointerLock = ( 'pointerLockElement' in document || 'mozPointerLockElement' in document || 'webkitPointerLockElement' in document ); if ( !hasPointerLock ) { return; } const dispatcher = new THREE.EventDispatcher(); function onPointerLockChange() { controls.enabled = ( element === document.pointerLockElement || element === document.mozPointerLockElement || element === document.webkitPointerLockElement ); dispatcher.dispatchEvent({ type: 'change', enabled: controls.enabled }); } function onPointerLockError() { dispatcher.dispatchEvent({ type: 'error' }); } document.addEventListener( 'pointerlockchange', onPointerLockChange ); document.addEventListener( 'mozpointerlockchange', onPointerLockChange ); document.addEventListener( 'webkitpointerlockchange', onPointerLockChange ); document.addEventListener( 'pointerlockerror', onPointerLockError ); document.addEventListener( 'mozpointerlockerror', onPointerLockError ); document.addEventListener( 'webkitpointerlockerror', onPointerLockError ); element.requestPointerLock = ( element.requestPointerLock || element.mozRequestPointerLock || element.webkitRequestPointerLock ); document.addEventListener( 'click', () => element.requestPointerLock() ); return dispatcher; }
import THREE from 'three'; export default function pointerLock( controls ) { const hasPointerLock = 'pointerLockElement' in document || 'mozPointerLockElement' in document || 'webkitPointerLockElement' in document; if ( !hasPointerLock ) { return; } const element = document.body; const dispatcher = new THREE.EventDispatcher(); function onPointerLockChange() { if ( document.pointerLockElement === element || document.mozPointerLockElement === element || document.webkitPointerLockElement === element ) { controls.enabled = true; } else { controls.enabled = false; } dispatcher.dispatchEvent({ type: 'change', enabled: controls.enabled }); } function onPointerLockError() { dispatcher.dispatchEvent({ type: 'error' }); } document.addEventListener( 'pointerlockchange', onPointerLockChange ); document.addEventListener( 'mozpointerlockchange', onPointerLockChange ); document.addEventListener( 'webkitpointerlockchange', onPointerLockChange ); document.addEventListener( 'pointerlockerror', onPointerLockError ); document.addEventListener( 'mozpointerlockerror', onPointerLockError ); document.addEventListener( 'webkitpointerlockerror', onPointerLockError ); element.requestPointerLock = element.requestPointerLock || element.mozRequestPointerLock || element.webkitRequestPointerLock; document.addEventListener( 'click', () => element.requestPointerLock() ); return dispatcher; }
Enforce compatibility of the `undefined` and `default` setting for navbar color scheme. This makes the demo work for the default, but the inverse still doesn't work because of different scheme names.
import { computed } from '@ember/object'; import Navbar from 'ember-bootstrap/components/base/bs-navbar'; export default Navbar.extend({ classNameBindings: ['breakpointClass', 'backgroundClass'], type: computed('appliedType', { get() { return this.get('appliedType'); }, set(key, value) { // eslint-disable-line no-unused let newValue = (!value || value === 'default') ? 'light' : value; this.set('appliedType', newValue); return newValue; } }), appliedType: 'light', /** * Defines the responsive toggle breakpoint size. Options are the standard * two character Bootstrap size abbreviations. Used to set the `navbar-expand-*` * class. * * @property toggleBreakpoint * @type String * @default 'md' * @public */ toggleBreakpoint: 'lg', /** * Sets the background color for the navbar. Can be any color * in the set that composes the `bg-*` classes. * * @property backgroundColor * @type String * @default 'light' * @public */ backgroundColor: 'light', breakpointClass: computed('toggleBreakpoint', function() { let toggleBreakpoint = this.get('toggleBreakpoint'); return `navbar-expand-${toggleBreakpoint}`; }), backgroundClass: computed('backgroundColor', function() { let backgroundColor = this.get('backgroundColor'); return `bg-${backgroundColor}`; }), _validPositions: ['fixed-top', 'fixed-bottom', 'sticky-top'], _positionPrefix: '' });
import { computed } from '@ember/object'; import Navbar from 'ember-bootstrap/components/base/bs-navbar'; export default Navbar.extend({ classNameBindings: ['breakpointClass', 'backgroundClass'], type: 'light', /** * Defines the responsive toggle breakpoint size. Options are the standard * two character Bootstrap size abbreviations. Used to set the `navbar-expand-*` * class. * * @property toggleBreakpoint * @type String * @default 'md' * @public */ toggleBreakpoint: 'lg', /** * Sets the background color for the navbar. Can be any color * in the set that composes the `bg-*` classes. * * @property backgroundColor * @type String * @default 'light' * @public */ backgroundColor: 'light', breakpointClass: computed('toggleBreakpoint', function() { let toggleBreakpoint = this.get('toggleBreakpoint'); return `navbar-expand-${toggleBreakpoint}`; }), backgroundClass: computed('backgroundColor', function() { let backgroundColor = this.get('backgroundColor'); return `bg-${backgroundColor}`; }), _validPositions: ['fixed-top', 'fixed-bottom', 'sticky-top'], _positionPrefix: '' });
Update to use Laravel Console Menu version 3.x
<?php declare(strict_types=1); /** * This file is part of Laravel Zero. * * (c) Nuno Maduro <enunomaduro@gmail.com> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace LaravelZero\Framework\Components\Menu; use LaravelZero\Framework\Components\AbstractInstaller; /** * @internal */ final class Installer extends AbstractInstaller { /** * {@inheritdoc} */ protected $name = 'install:menu'; /** * {@inheritdoc} */ protected $description = 'Menu: Build beautiful CLI interactive menus'; /** * {@inheritdoc} */ public function install(): void { $this->require('nunomaduro/laravel-console-menu "^3.0"'); } }
<?php declare(strict_types=1); /** * This file is part of Laravel Zero. * * (c) Nuno Maduro <enunomaduro@gmail.com> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace LaravelZero\Framework\Components\Menu; use LaravelZero\Framework\Components\AbstractInstaller; /** * @internal */ final class Installer extends AbstractInstaller { /** * {@inheritdoc} */ protected $name = 'install:menu'; /** * {@inheritdoc} */ protected $description = 'Menu: Build beautiful CLI interactive menus'; /** * {@inheritdoc} */ public function install(): void { $this->require('nunomaduro/laravel-console-menu "^2.3"'); } }
Add argument validations: 1. Enable excluding secondaryBroadcastingUrl. 2. Validate that the HTTPS playback URL was provided before setting its value.
<?php require_once(dirname(__FILE__).'/../bootstrap.php'); if ($argc < 4) die ("Required parameters not received. Run script in the following format: php setPartnerBroadcastingUrls.php {partnerId} {parimary broadcast URL} {HTTP playback URL} [{secondary broadcast URL}] [{HTTPS playback URL}]"); $partnerId = $argv[1]; $primaryBroadcatUrl = $argv[2]; $httpPlaybackUrl = $argv[3]; $secondaryBroadcastUrl = null; if (isset($argv[4]) && $argv[4] != 0) $secondaryBroadcastUrl = $argv[4]; $httpsPlaybackUrl = null; if (isset($argv[5])) $httpsPlaybackUrl = $argv[5]; $partner = PartnerPeer::retrieveByPK($partnerId); if (!$partner) { die ("Partner with id {$partnerId} not found."); } $partner->setBroadcastUrlManager('kPartnerBroadcastUrlManager'); $partner->setPrimaryBroadcastUrl($primaryBroadcatUrl); $partner->setSecondaryBroadcastUrl($secondaryBroadcastUrl); $liveStreamConfigurations = array ('http' => $httpPlaybackUrl); if($httpsPlaybackUrl) $liveStreamConfigurations = array ('https' => $httpsPlaybackUrl); $partner->setLiveStreamPlaybackUrlConfigurations($liveStreamConfigurations); $partner->save();
<?php require_once(dirname(__FILE__).'/../bootstrap.php'); if ($argc < 4) die ("Required parameters not received. Run script in the following format: php setPartnerBroadcastingUrls.php {partnerId} {parimary broadcast URL} {HTTP playback URL} [{secondary broadcast URL}] [{HTTPS playback URL}]"); $partnerId = $argv[1]; $primaryBroadcatUrl = $argv[2]; $httpPlaybackUrl = $argv[3]; $secondaryBroadcastUrl = null; if (isset($argv[4])) $secondaryBroadcastUrl = $argv[4]; $httpsPlaybackUrl = null; if (isset($argv[5])) $httpsPlaybackUrl = $argv[5]; $partner = PartnerPeer::retrieveByPK($partnerId); if (!$partner) { die ("Partner with id {$partnerId} not found."); } $partner->setBroadcastUrlManager('kPartnerBroadcastUrlManager'); $partner->setPrimaryBroadcastUrl($primaryBroadcatUrl); $partner->setSecondaryBroadcastUrl($secondaryBroadcastUrl); $liveStreamConfigurations = array ('http' => $httpPlaybackUrl); $liveStreamConfigurations = array ('https' => $httpsPlaybackUrl); $partner->setLiveStreamPlaybackUrlConfigurations($liveStreamConfigurations); $partner->save();
Add home link in docs. [ci skip]
<?php // cleanup requested file path $filePath = isset($_GET['f']) ? $_GET['f'] : 'lodash'; $filePath = preg_replace('#(\.*[\/])+#', '', $filePath); $filePath .= preg_match('/\.[a-z]+$/', $filePath) ? '' : '.js'; // output filename if (isset($_GET['o'])) { $outputName = $_GET['o']; } else if (isset($_SERVER['argv'][1])) { $outputName = $_SERVER['argv'][1]; } else { $outputName = basename($filePath); } /*--------------------------------------------------------------------------*/ require('../vendor/docdown/docdown.php'); // get package version $version = json_decode(file_get_contents('../package.json'))->version; // generate Markdown $markdown = docdown(array( 'path' => '../' . $filePath, 'title' => '<a href="http://lodash.com">Lo-Dash</a> <span>v' . $version . '</span>', 'toc' => 'categories', 'url' => 'https://github.com/lodash/lodash/blob/master/lodash.js' )); // save to a `.md` file file_put_contents($outputName . '.md', $markdown); // print header('Content-Type: text/plain;charset=utf-8'); echo $markdown . PHP_EOL; ?>
<?php // cleanup requested file path $filePath = isset($_GET['f']) ? $_GET['f'] : 'lodash'; $filePath = preg_replace('#(\.*[\/])+#', '', $filePath); $filePath .= preg_match('/\.[a-z]+$/', $filePath) ? '' : '.js'; // output filename if (isset($_GET['o'])) { $outputName = $_GET['o']; } else if (isset($_SERVER['argv'][1])) { $outputName = $_SERVER['argv'][1]; } else { $outputName = basename($filePath); } /*--------------------------------------------------------------------------*/ require('../vendor/docdown/docdown.php'); // get package version $version = json_decode(file_get_contents('../package.json'))->version; // generate Markdown $markdown = docdown(array( 'path' => '../' . $filePath, 'title' => 'Lo-Dash <span>v' . $version . '</span>', 'toc' => 'categories', 'url' => 'https://github.com/lodash/lodash/blob/master/lodash.js' )); // save to a `.md` file file_put_contents($outputName . '.md', $markdown); // print header('Content-Type: text/plain;charset=utf-8'); echo $markdown . PHP_EOL; ?>
Exit with nonzero when qq strings are missing Change-Id: Ife0f114dbe48faa445397aa7a94f74de2309d117
#!/usr/bin/env python import os import sys import xml.etree.ElementTree as ET RES_FOLDER = os.path.abspath(os.path.join(os.path.dirname(__file__), "../app/src/main/res")) EN_STRINGS = os.path.join(RES_FOLDER, "values/strings.xml") QQ_STRINGS = os.path.join(RES_FOLDER, "values-qq/strings.xml") # Get ElementTree containing all message names in English enroot = ET.parse(EN_STRINGS).getroot() # Get ElementTree containing all documented messages qqroot = ET.parse(QQ_STRINGS).getroot() # Create a set to store all documented messages qqmsgs = set() # Add all documented messages to that set for child in qqroot: qqmsgs.add(child.attrib['name']) # Iterate through all messages and check that they're documented missing = 0 for child in enroot: if child.attrib['name'] not in qqmsgs: print(child.attrib['name'] + " is undocumented!") missing += 1 sys.exit(1 if missing else 0)
#!/usr/bin/env python import os import xml.etree.ElementTree as ET RES_FOLDER = os.path.abspath(os.path.join(os.path.dirname(__file__), "../app/src/main/res")) EN_STRINGS = os.path.join(RES_FOLDER, "values/strings.xml") QQ_STRINGS = os.path.join(RES_FOLDER, "values-qq/strings.xml") # Get ElementTree containing all message names in English enroot = ET.parse(EN_STRINGS).getroot() # Get ElementTree containing all documented messages qqroot = ET.parse(QQ_STRINGS).getroot() # Create a set to store all documented messages qqmsgs = set() # Add all documented messages to that set for child in qqroot: qqmsgs.add(child.attrib['name']) # Iterate through all messages and check that they're documented for child in enroot: if child.attrib['name'] not in qqmsgs: print(child.attrib['name'] + " is undocumented!")
refactor: Move program option object into its own function
"use strict"; var program = require('commander'); var path = require('path'); var appDir = path.resolve(__dirname, '..', 'app'); var newCommand = require('./commands/new')(appDir); var pkg = require(path.resolve(__dirname, '..', 'package.json')); module.exports = function() { setProgramBaseSettings(); setNewCommand(); initProgram(); } function setNewCommand() { program .command('new') .description('Create a new project') .action(function() { newCommand.init(getProgramOptions()); }); } function initProgram() { program.parse(process.argv); if (!process.argv.slice(2).length) { program.outputHelp(); } } function getProgramOptions() { return { silent: program.silent, verbose: program.verbose } } function setProgramBaseSettings() { program .version(pkg.version) .option('-s, --silent', 'Surpress all on-screen messages') .option('-v, --verbose', 'Show all on-screen messages'); }
"use strict"; var program = require('commander'); var path = require('path'); var appDir = path.resolve(__dirname, '..', 'app'); var newCommand = require('./commands/new')(appDir); var pkg = require(path.resolve(__dirname, '..', 'package.json')); module.exports = function() { setProgramBaseSettings(); setNewCommand(); initProgram(); } function setNewCommand() { program .command('new') .description('Create a new project') .action(function() { newCommand.init({ silent: program.silent, verbose: program.verbose }); }); } function initProgram() { program.parse(process.argv); if (!process.argv.slice(2).length) { program.outputHelp(); } } function setProgramBaseSettings() { program .version(pkg.version) .option('-s, --silent', 'Surpress all on-screen messages') .option('-v, --verbose', 'Show all on-screen messages'); }
Raise allowed difference for RandomForest
import numpy as np from nose.tools import assert_true from sklearn.ensemble import RandomForestClassifier from splearn.ensemble import SparkRandomForestClassifier from splearn.utils.testing import SplearnTestCase from splearn.utils.validation import check_rdd_dtype class TestSparkRandomForest(SplearnTestCase): def test_same_predictions(self): X, y, Z = self.make_classification(2, 10000) local = RandomForestClassifier() dist = SparkRandomForestClassifier() y_local = local.fit(X, y).predict(X) y_dist = dist.fit(Z, classes=np.unique(y)).predict(Z[:, 'X']) y_conv = dist.to_scikit().predict(X) assert_true(check_rdd_dtype(y_dist, (np.ndarray,))) assert(sum(y_local != y_dist.toarray()) < len(y_local) * 5./100.) assert(sum(y_local != y_conv) < len(y_local) * 5./100.)
import numpy as np from nose.tools import assert_true from sklearn.ensemble import RandomForestClassifier from splearn.ensemble import SparkRandomForestClassifier from splearn.utils.testing import SplearnTestCase from splearn.utils.validation import check_rdd_dtype class TestSparkRandomForest(SplearnTestCase): def test_same_predictions(self): X, y, Z = self.make_classification(2, 10000) local = RandomForestClassifier() dist = SparkRandomForestClassifier() y_local = local.fit(X, y).predict(X) y_dist = dist.fit(Z, classes=np.unique(y)).predict(Z[:, 'X']) y_conv = dist.to_scikit().predict(X) assert_true(check_rdd_dtype(y_dist, (np.ndarray,))) assert(sum(y_local != y_dist.toarray()) < len(y_local) * 2./100.) assert(sum(y_local != y_conv) < len(y_local) * 2./100.)
Set max threads for MKL.
import logging, gensim, bz2 import mkl from knub.thesis.util.memory import limit_memory logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) mkl.set_num_threads(8) def main(): logging.info("Starting Wikipedia LDA") # limit memory to 32 GB limit_memory(32000) id2word = gensim.corpora.Dictionary.load_from_text(bz2.BZ2File("/data/wikipedia/2016-06-21/gensim_wordids.txt.bz2")) mm = gensim.corpora.MmCorpus("/data/wikipedia/2016-06-21/gensim_tfidf.mm") print mm lda = gensim.models.ldamodel.LdaModel(corpus=mm, num_topics=100, id2word=id2word, chunksize=10000, passes=1) # lda = gensim.models.ldamodel.LdaModel(corpus=mm, num_topics=100, id2word=id2word, workers=3) lda.save("/data/wikipedia/2016-06-21/topics.model") lda.print_topics() logging.info("Finished Wikipedia LDA") if __name__ == "__main__": main()
import logging, gensim, bz2 from knub.thesis.util.memory import limit_memory logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO) def main(): logging.info("Starting Wikipedia LDA") # limit memory to 32 GB limit_memory(32000) id2word = gensim.corpora.Dictionary.load_from_text(bz2.BZ2File("/data/wikipedia/2016-06-21/gensim_wordids.txt.bz2")) mm = gensim.corpora.MmCorpus("/data/wikipedia/2016-06-21/gensim_tfidf.mm") print mm lda = gensim.models.ldamodel.LdaModel(corpus=mm, num_topics=100, id2word=id2word, chunksize=10000, passes=1) # lda = gensim.models.ldamodel.LdaModel(corpus=mm, num_topics=100, id2word=id2word, workers=3) lda.save("/data/wikipedia/2016-06-21/topics.model") lda.print_topics() logging.info("Finished Wikipedia LDA") if __name__ == "__main__": main()
Add links to individual inventory scenes
import React from 'react' import InventoryContainer from '../containers/InventoryContainer' import ShadowBox from '../Components/ShadowBox' import FlexDiv from '../Components/FlexDiv' const InventoryBox = ShadowBox.extend` width:100%; margin:10px; ` const InventoryDiv = FlexDiv.extend` width:50%; margin-top: 50px; ` const InventoryScene = () => { const maltColumns = [ {name: "Malt Name", type: "text"}, {name: "Amount", type: "number"} ] const yeastColumns = [ {name: "Yeast Name", type: "text"}, {name: "Amount", type: "number"} ] const hopColumns = [ {name: "Hop Name", type: "text"}, {name: "Amount", type: "number"} ] return( <InventoryDiv> <InventoryBox> <InventoryContainer name="malt" columns={maltColumns} displayLimit={5} /> <a href="/inventory/malt">Malt</a> </InventoryBox> <InventoryBox> <InventoryContainer name="yeast" columns={yeastColumns} displayLimit={5} /> <a href="/inventory/yeast">Yeast</a> </InventoryBox> <InventoryBox> <InventoryContainer name="hops" columns={hopColumns} displayLimit={5} /> <a href="/inventory/hops">Hops</a> </InventoryBox> </InventoryDiv> ) } export default InventoryScene
import React from 'react' import InventoryContainer from '../containers/InventoryContainer' import ShadowBox from '../Components/ShadowBox' import FlexDiv from '../Components/FlexDiv' const InventoryBox = ShadowBox.extend` width:100%; margin:10px; ` const InventoryDiv = FlexDiv.extend` width:50%; margin-top: 50px; ` const InventoryScene = () => { const maltColumns = [ {name: "Malt Name", type: "text"}, {name: "Amount", type: "number"} ] const yeastColumns = [ {name: "Yeast Name", type: "text"}, {name: "Amount", type: "number"} ] const hopColumns = [ {name: "Hop Name", type: "text"}, {name: "Amount", type: "number"} ] return( <InventoryDiv> <InventoryBox> <InventoryContainer name="malt" columns={maltColumns} displayLimit={5} /> </InventoryBox> <InventoryBox> <InventoryContainer name="yeast" columns={yeastColumns} displayLimit={5} /> </InventoryBox> <InventoryBox> <InventoryContainer name="hops" columns={hopColumns} displayLimit={5} /> </InventoryBox> </InventoryDiv> ) } export default InventoryScene
Add / Fouc out (on link click)
console.log('main.js'); // Load Css async w LoadCSS & +1 polyfill / https://www.npmjs.com/package/fg-loadcss?notice=MIvGLZ2qXNAEF8AM1kvyFWL8p-1MwaU7UpJd8jcG var stylesheet = loadCSS( "styles/main.css" ); onloadCSS( stylesheet, function() { console.log( "LoadCSS > Stylesheet has loaded. Yay !" ); // + No Fouc management $('.no-fouc').fadeIn(); // Lovely Jquery animation on load // Fouc out management $('a').click(function(e) { e.preventDefault(); newLocation = this.href; $('body').fadeOut(200, function() { window.location = newLocation; }); }); }); // Load Hyphenopoly plugins, manage font césure & text FOUC // Need to be loaded beofre HyphenopolyLoader, cf. gulpfile paths.scripts.src var Hyphenopoly = { require: { "en-us": "hyphenation" }, paths: { patterndir: 'assets/hyphenopoly/patterns/', maindir: 'assets/hyphenopoly/' }, setup: { classnames: { "hyphenate": {} } } };
console.log('main.js'); // Load Css async w LoadCSS & +1 polyfill / https://www.npmjs.com/package/fg-loadcss?notice=MIvGLZ2qXNAEF8AM1kvyFWL8p-1MwaU7UpJd8jcG var stylesheet = loadCSS( "styles/main.css" ); onloadCSS( stylesheet, function() { console.log( "LoadCSS > Stylesheet has loaded. Yay !" ); $('.no-fouc').fadeIn(); // Jquery animation }); // Load Hyphenopoly plugins, manage font césure & text FOUC // Need to be loaded beofre HyphenopolyLoader, cf. gulpfile paths.scripts.src var Hyphenopoly = { require: { "en-us": "hyphenation" }, paths: { patterndir: 'assets/hyphenopoly/patterns/', maindir: 'assets/hyphenopoly/' }, setup: { classnames: { "hyphenate": {} } } };
Use SyncEmailMixin for account mails
import re from allauth.account.adapter import DefaultAccountAdapter from django.conf import settings from adhocracy4.emails.mixins import SyncEmailMixin from apps.contrib.emails import Email from apps.users import USERNAME_INVALID_MESSAGE from apps.users import USERNAME_REGEX class UserAccountEmail(Email, SyncEmailMixin): def get_receivers(self): return [self.object] @property def template_name(self): return self.kwargs['template_name'] def get_context(self): context = super().get_context() context['contact_email'] = settings.CONTACT_EMAIL return context class AccountAdapter(DefaultAccountAdapter): username_regex = re.compile(USERNAME_REGEX) error_messages = dict( DefaultAccountAdapter.error_messages, invalid_username=USERNAME_INVALID_MESSAGE ) def send_mail(self, template_prefix, email, context): user = context['user'] return UserAccountEmail.send( user, template_name=template_prefix, **context )
import re from allauth.account.adapter import DefaultAccountAdapter from django.conf import settings from apps.contrib.emails import Email from apps.users import USERNAME_INVALID_MESSAGE from apps.users import USERNAME_REGEX class UserAccountEmail(Email): def get_receivers(self): return [self.object] @property def template_name(self): return self.kwargs['template_name'] def get_context(self): context = super().get_context() context['contact_email'] = settings.CONTACT_EMAIL return context class AccountAdapter(DefaultAccountAdapter): username_regex = re.compile(USERNAME_REGEX) error_messages = dict( DefaultAccountAdapter.error_messages, invalid_username=USERNAME_INVALID_MESSAGE ) def send_mail(self, template_prefix, email, context): user = context['user'] return UserAccountEmail.send_sync( user, template_name=template_prefix, **context )
Fix super call by using this() instead; the super call was missing the status code
package org.iotbricks.core.binding.proton; import java.util.Collections; import java.util.Map; import java.util.Optional; import java.util.function.Consumer; import java.util.function.Function; import org.apache.qpid.proton.amqp.messaging.ApplicationProperties; import org.apache.qpid.proton.message.Message; import org.iotbricks.core.binding.amqp.AmqpRequestContext; import org.iotbricks.core.binding.common.MessageResponseHandler; /** * A specialized {@link MessageResponseHandler} for errors. * <p> * This handler also sets the application property {@code status} to {@code 500} * on the proton message. Indicating an error to the receiver. */ public class ProtonErrorMessageResponseHandler extends MessageResponseHandler<ProtonRequestContext, Message> { private static final Consumer<Message> SET_TO_500 = message -> { final Map<String, ?> properties = Collections.singletonMap("status", 500); message.setApplicationProperties(new ApplicationProperties(properties)); }; public ProtonErrorMessageResponseHandler( final Function<ProtonRequestContext, Optional<String>> responseAddressProvider) { super(responseAddressProvider, SET_TO_500); } public ProtonErrorMessageResponseHandler() { this(AmqpRequestContext::getReplyToAddress); } }
package org.iotbricks.core.binding.proton; import java.util.Collections; import java.util.Map; import java.util.Optional; import java.util.function.Consumer; import java.util.function.Function; import org.apache.qpid.proton.amqp.messaging.ApplicationProperties; import org.apache.qpid.proton.message.Message; import org.iotbricks.core.binding.amqp.AmqpRequestContext; import org.iotbricks.core.binding.common.MessageResponseHandler; /** * A specialized {@link MessageResponseHandler} for errors. * <p> * This handler also sets the application property {@code status} to {@code 500} * on the proton message. Indicating an error to the receiver. */ public class ProtonErrorMessageResponseHandler extends MessageResponseHandler<ProtonRequestContext, Message> { private static final Consumer<Message> SET_TO_500 = message -> { final Map<String, ?> properties = Collections.singletonMap("status", 500); message.setApplicationProperties(new ApplicationProperties(properties)); }; public ProtonErrorMessageResponseHandler( final Function<ProtonRequestContext, Optional<String>> responseAddressProvider) { super(responseAddressProvider, SET_TO_500); } public ProtonErrorMessageResponseHandler() { super(AmqpRequestContext::getReplyToAddress); } }
Add checks for null and streaming files
var es = require('event-stream'); var stylus = require('stylus'); var gutil = require('gulp-util'); var path = require('path'); module.exports = function (options) { var opts = options ? options : {}; var paths = opts.paths || []; function stylusstream (file, cb) { // file is on object passed in by gulp // TODO: support streaming files if (file.isNull()) return cb(null, file); // pass along if (file.isStream()) return cb(new Error("gulp-stylus: Streaming not supported")); var s = stylus(file.contents.toString('utf8')); s.set('filename', file.path); s.set('paths', paths.concat([path.dirname(file.path)])); //trying to load extensions from array passed by user if (options && options.use && options.use.length > 0){ s.use(function(stylus){ try{ options.use.forEach(function(args){ stylus.use(require(args)()); }); } catch(e){} }); } s.render(function(err, css){ if (err) return cb(err); file.path = gutil.replaceExtension(file.path, '.css'); file.contents = new Buffer(css); cb(null, file); }); } return es.map(stylusstream); };
var es = require('event-stream'); var stylus = require('stylus'); var gutil = require('gulp-util'); var path = require('path'); module.exports = function (options) { var opts = options ? options : {}; var paths = opts.paths || []; function stylusstream (file, cb) { // file is on object passed in by gulp // TODO: support streaming files var s = stylus(file.contents.toString('utf8')); s.set('filename', file.path); s.set('paths', paths.concat([path.dirname(file.path)])); //trying to load extensions from array passed by user if (options && options.use && options.use.length > 0){ s.use(function(stylus){ try{ options.use.forEach(function(args){ stylus.use(require(args)()); }); } catch(e){} }); } s.render(function(err, css){ if (err) return cb(err); file.path = gutil.replaceExtension(file.path, '.css'); file.contents = new Buffer(css); cb(null, file); }); } return es.map(stylusstream); };
Allow at least 5 descriptors if detection goes wrong
'use strict'; var toUint = require('es5-ext/lib/Number/to-uint') , count = 0, limit = Infinity, callbacks = []; exports.open = function () { ++count; }; exports.close = function () { if ((--count < limit) && callbacks.length) { do { callbacks.shift()(); } while ((count < limit) && callbacks.length); } }; require('child_process').exec('ulimit -n', { env: process.env }, function (err, stdout, stderr) { if (!stdout) { return; } stdout = stdout.trim(); if (isNaN(stdout)) { return; } // We subtract 25 to give eventual outer processes some air limit = Math.max(Number(stdout) - 25, 5); }); exports.isAvailable = function (padding) { return count < (limit - toUint(padding)); }; exports.cb = function (cb) { if (count < limit) { cb(); return; } callbacks.push(cb); };
'use strict'; var toUint = require('es5-ext/lib/Number/to-uint') , count = 0, limit = Infinity, callbacks = []; exports.open = function () { ++count; }; exports.close = function () { if ((--count < limit) && callbacks.length) { do { callbacks.shift()(); } while ((count < limit) && callbacks.length); } }; require('child_process').exec('ulimit -n', { env: process.env }, function (err, stdout, stderr) { if (!stdout) { return; } stdout = stdout.trim(); if (isNaN(stdout)) { return; } // We subtract 25 to give eventual outer processes some air limit = Number(stdout) - 25; }); exports.isAvailable = function (padding) { return count < (limit - toUint(padding)); }; exports.cb = function (cb) { if (count < limit) { cb(); return; } callbacks.push(cb); };
Add period at end of plug-in description All other plug-in descriptions have one too, so this is for consistency. Contributes to issue CURA-1190.
# Copyright (c) 2015 Ultimaker B.V. # Cura is released under the terms of the AGPLv3 or higher. from . import GCodeWriter from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "GCode Writer"), "author": "Ultimaker", "version": "1.0", "description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file."), "api": 2 }, "mesh_writer": { "output": [{ "extension": "gcode", "description": catalog.i18nc("@item:inlistbox", "GCode File"), "mime_type": "text/x-gcode", "mode": GCodeWriter.GCodeWriter.OutputMode.TextMode }] } } def register(app): return { "mesh_writer": GCodeWriter.GCodeWriter() }
# Copyright (c) 2015 Ultimaker B.V. # Cura is released under the terms of the AGPLv3 or higher. from . import GCodeWriter from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "GCode Writer"), "author": "Ultimaker", "version": "1.0", "description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file"), "api": 2 }, "mesh_writer": { "output": [{ "extension": "gcode", "description": catalog.i18nc("@item:inlistbox", "GCode File"), "mime_type": "text/x-gcode", "mode": GCodeWriter.GCodeWriter.OutputMode.TextMode }] } } def register(app): return { "mesh_writer": GCodeWriter.GCodeWriter() }
Use a decorator to wrap the actual checks
import os import uuid import time import json from flask import Flask app = Flask(__name__) app.debug = True def check(endpoint): def actual_decorator(func): def actual_check(): start_time = time.time() try: ret = func() except: # FIXME: log this error somewhere ret = False total_time = time.time() - start_time return json.dumps({ 'status': ret, 'time': total_time }) return app.route(endpoint)(actual_check) return actual_decorator @check('/nfs/home') def nfs_home_check(): content = str(uuid.uuid4()) path = os.path.join('/data/project/canary/nfs-test/', content) try: with open(path, 'w') as f: f.write(content) with open(path) as f: actual_content = f.read() if actual_content == content: return True return False finally: os.remove(path)
import os import uuid import time import json from flask import Flask app = Flask(__name__) app.debug = True def check(endpoint): def actual_check(function): start_time = time.time() ret = function() total_time = time.time() - start_time return json.dumps({ 'status': ret, 'time': total_time }) return app.route(endpoint)(actual_check) @check('/nfs/home') def nfs_home_check(): content = str(uuid.uuid4()) path = os.path.join('/data/project/canary/nfs-test/', content) try: with open(path, 'w') as f: f.write(content) with open(path) as f: actual_content = f.read() if actual_content == content: return True return False finally: os.remove(path)
Simplify regex in url matching
from django.conf.urls import url from modelview import views from oeplatform import settings from django.conf.urls.static import static urlpatterns = [ url(r'^(?P<sheettype>[\w\d_]+)s/$', views.listsheets, {}, name='modellist'), url(r'^overview/$', views.overview, {}), url(r'^(?P<sheettype>[\w\d_]+)s/add/$', views.FSAdd.as_view(), {'method':'add'}, name='modellist'), url(r'^(?P<sheettype>[\w\d_]+)s/download/$', views.model_to_csv, {}, name='index'), url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\d]+)/$', views.show, {}, name='index'), url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\d]+)/edit/$', views.editModel, {}, name='index'), url(r'^(?P<sheettype>[\w\d_]+)s/(?P<pk>[\d]+)/update/$', views.FSAdd.as_view(), {'method':'update'}, name='index'), ]
from django.conf.urls import url from modelview import views from oeplatform import settings from django.conf.urls.static import static urlpatterns = [ url(r'^(?P<sheettype>[\w\d_]+)s/$', views.listsheets, {}, name='modellist'), url(r'^overview/$', views.overview, {}), url(r'^(?P<sheettype>[\w\d_]+)s/add/$', views.FSAdd.as_view(), {'method':'add'}, name='modellist'), url(r'^(?P<sheettype>[\w\d_]+)s/download/$', views.model_to_csv, {}, name='index'), url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\w\d_]+)/$', views.show, {}, name='index'), url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\w\d_]+)/edit/$', views.editModel, {}, name='index'), url(r'^(?P<sheettype>[\w\d_]+)s/(?P<pk>[\w\d_]+)/update/$', views.FSAdd.as_view(), {'method':'update'}, name='index'), ]
Use argparse helper to add default values
import argparse from alerta.app import app from alerta.app import db from alerta.version import __version__ LOG = app.logger def main(): parser = argparse.ArgumentParser( prog='alertad', description='Alerta server (for development purposes only)', formatter_class=argparse.ArgumentDefaultsHelpFormatter ) parser.add_argument( '-H', '--host', type=str, default='0.0.0.0', help='Bind host' ) parser.add_argument( '-P', '--port', type=int, default=8080, help='Listen port' ) parser.add_argument( '--debug', action='store_true', default=False, help='Debug output' ) args = parser.parse_args() LOG.info('Starting alerta version %s ...', __version__) LOG.info('Using MongoDB version %s ...', db.get_version()) app.run(host=args.host, port=args.port, debug=args.debug, threaded=True)
import argparse from alerta.app import app from alerta.app import db from alerta.version import __version__ LOG = app.logger def main(): parser = argparse.ArgumentParser( prog='alertad', description='Alerta server (for development purposes only)' ) parser.add_argument( '-P', '--port', type=int, default=8080, help='Listen port (default: 8080)' ) parser.add_argument( '-H', '--host', type=str, default='0.0.0.0', help='Bind host (default: 0.0.0.0)' ) parser.add_argument( '--debug', action='store_true', default=False, help='Debug output' ) args = parser.parse_args() LOG.info('Starting alerta version %s ...', __version__) LOG.info('Using MongoDB version %s ...', db.get_version()) app.run(host=args.host, port=args.port, debug=args.debug, threaded=True)
Remove unnecessary Object declaration in type parameter Change-Id: I466a2d43e9f9d3effd3860761a97855110b71781
/* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.event; /** * Abstraction of an of a time-stamped event pertaining to an arbitrary subject. */ public interface Event<T extends Enum, S> { /** * Returns the timestamp of when the event occurred, given in milliseconds * since the start of epoch. * * @return timestamp in milliseconds */ long time(); /** * Returns the type of the event. * * @return event type */ T type(); /** * Returns the subject of the event. * * @return subject to which this event pertains */ S subject(); }
/* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.event; /** * Abstraction of an of a time-stamped event pertaining to an arbitrary subject. */ public interface Event<T extends Enum, S extends Object> { /** * Returns the timestamp of when the event occurred, given in milliseconds * since the start of epoch. * * @return timestamp in milliseconds */ long time(); /** * Returns the type of the event. * * @return event type */ T type(); /** * Returns the subject of the event. * * @return subject to which this event pertains */ S subject(); }
Allow specifying test.py flags in 'inv test'
from os import mkdir from os.path import join from shutil import rmtree, copytree from invoke import Collection, ctask as task from invocations.docs import docs, www from invocations.packaging import publish # Until we move to spec-based testing @task def test(ctx, coverage=False, flags=""): if "--verbose" not in flags.split(): flags += " --verbose" runner = "python" if coverage: runner = "coverage run --source=paramiko" ctx.run("{0} test.py {1}".format(runner, flags), pty=True) @task def coverage(ctx): ctx.run("coverage run --source=paramiko test.py --verbose") # Until we stop bundling docs w/ releases. Need to discover use cases first. @task def release(ctx): # Build docs first. Use terribad workaround pending invoke #146 ctx.run("inv docs") # Move the built docs into where Epydocs used to live target = 'docs' rmtree(target, ignore_errors=True) # TODO: make it easier to yank out this config val from the docs coll copytree('sites/docs/_build', target) # Publish publish(ctx) # Remind print("\n\nDon't forget to update RTD's versions page for new minor releases!") ns = Collection(test, coverage, release, docs, www)
from os import mkdir from os.path import join from shutil import rmtree, copytree from invoke import Collection, ctask as task from invocations.docs import docs, www from invocations.packaging import publish # Until we move to spec-based testing @task def test(ctx, coverage=False): runner = "python" if coverage: runner = "coverage run --source=paramiko" flags = "--verbose" ctx.run("{0} test.py {1}".format(runner, flags), pty=True) @task def coverage(ctx): ctx.run("coverage run --source=paramiko test.py --verbose") # Until we stop bundling docs w/ releases. Need to discover use cases first. @task def release(ctx): # Build docs first. Use terribad workaround pending invoke #146 ctx.run("inv docs") # Move the built docs into where Epydocs used to live target = 'docs' rmtree(target, ignore_errors=True) # TODO: make it easier to yank out this config val from the docs coll copytree('sites/docs/_build', target) # Publish publish(ctx) # Remind print("\n\nDon't forget to update RTD's versions page for new minor releases!") ns = Collection(test, coverage, release, docs, www)
Switch to using dbSession in db.py instead of baseInterface.py This is another file that should have been included in PR #272, where we transitioned all existing non-Flask db access to a db connection using the new contextmanager. Originally missed this one because it *is* using a contextmanager, but it's using one in the deprecated baseInterface.py instead of the newer db.py.
import logging import sys from dataactcore.interfaces.db import databaseSession from dataactbroker.fsrs import ( configValid, fetchAndReplaceBatch, GRANT, PROCUREMENT) logger = logging.getLogger(__name__) if __name__ == '__main__': logging.basicConfig(level=logging.INFO) with databaseSession() as sess: if not configValid(): logger.error("No config for broker/fsrs/[service]/wsdl") sys.exit(1) else: procs = fetchAndReplaceBatch(sess, PROCUREMENT) grants = fetchAndReplaceBatch(sess, GRANT) awards = procs + grants numSubAwards = sum(len(a.subawards) for a in awards) logger.info("Inserted/Updated %s awards, %s subawards", len(awards), numSubAwards)
import logging import sys from dataactcore.models.baseInterface import databaseSession from dataactbroker.fsrs import ( configValid, fetchAndReplaceBatch, GRANT, PROCUREMENT) logger = logging.getLogger(__name__) if __name__ == '__main__': logging.basicConfig(level=logging.INFO) with databaseSession() as sess: if not configValid(): logger.error("No config for broker/fsrs/[service]/wsdl") sys.exit(1) else: procs = fetchAndReplaceBatch(sess, PROCUREMENT) grants = fetchAndReplaceBatch(sess, GRANT) awards = procs + grants numSubAwards = sum(len(a.subawards) for a in awards) logger.info("Inserted/Updated %s awards, %s subawards", len(awards), numSubAwards)
ZimForm: Fix multiple anlage select in create Now it's possible to add multiple anlagen to a zim when creating/editing. It still gives an error when an anlage that was part of a zim is not selected (remove anlage from zim)
<?php /** * Zim form. * * @package openZIM * @subpackage form * @author Your name here * @version SVN: $Id: sfDoctrineFormTemplate.php 23810 2009-11-12 11:07:44Z Kris.Wallsmith $ */ class ZimForm extends BaseZimForm { public function configure() { $this->embedRelation('Anlagen'); $this->widgetSchema['Anlagen'] = new sfWidgetFormDoctrineChoice(array( 'model' => $this->getRelatedModelName('Anlagen'), 'multiple' => true, // 'label_associated' => 'Anlagen für dieses ZIM', // 'label_unassociated' => 'Verfügbare Anlagen', 'renderer_class' => 'sfWidgetFormSelectDoubleList')); $this->widgetSchema['Anlagen']->setDefault($this->getObject()->getAnlagen()); $this->validatorSchema['Anlagen'] = new sfValidatorDoctrineChoice(array( 'model' => $this->getRelatedModelName('Anlagen'), 'multiple' => true, 'required' => false )); } }
<?php /** * Zim form. * * @package openZIM * @subpackage form * @author Your name here * @version SVN: $Id: sfDoctrineFormTemplate.php 23810 2009-11-12 11:07:44Z Kris.Wallsmith $ */ class ZimForm extends BaseZimForm { public function configure() { $this->embedRelation('Anlagen'); $this->widgetSchema['Anlagen'] = new sfWidgetFormDoctrineChoice(array( 'model' => $this->getRelatedModelName('Anlagen'), 'multiple' => true, // 'label_associated' => 'Anlagen für dieses ZIM', // 'label_unassociated' => 'Verfügbare Anlagen', 'add_empty' => true, 'renderer_class' => 'sfWidgetFormSelectDoubleList')); $this->validatorSchema['Anlagen'] = new sfValidatorDoctrineChoice(array( 'model' => $this->getRelatedModelName('Anlagen'), 'required' => false )); } }
Fix database for testing environment
from .base_settings import * import os # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False SECRET_KEY = '$1B&VUf$OdUEfMJXd40qdakA36@%2NE_41Dz9tFs6l=z4v_3P-' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': 'puzzlehunt_db', 'HOST': '127.0.0.1', 'USER': 'root', 'PASSWORD': '', } } INTERNAL_IPS = '' EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' ALLOWED_HOSTS = ['*'] LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'handlers': { 'console': { 'class': 'logging.StreamHandler', }, }, 'loggers': { 'django': { 'handlers': ['console'], 'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'), }, }, }
from .base_settings import * import os # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False SECRET_KEY = '$1B&VUf$OdUEfMJXd40qdakA36@%2NE_41Dz9tFs6l=z4v_3P-' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', 'NAME': 'puzzlehunt_db', 'HOST': '127.0.0.1', 'USER': 'root', 'PASSWORD': '', 'OPTIONS': {'charset': 'utf8mb4'}, } } INTERNAL_IPS = '' EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' ALLOWED_HOSTS = ['*'] LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'handlers': { 'console': { 'class': 'logging.StreamHandler', }, }, 'loggers': { 'django': { 'handlers': ['console'], 'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'), }, }, }
Update readme location following rename.
#!/usr/bin/env python # # coding: utf-8 from setuptools import setup, find_packages long_description = open('README.rst').read() setup( name='captainhook', description='A collection of git commit hooks', version='0.8.3', long_description=long_description, author='Alex Couper', author_email='info@alexcouper.com', url='https://github.com/alexcouper/captainhook', zip_safe=False, scripts=[ 'scripts/captainhook' ], install_requires=[ 'docopt==0.6.1', ], packages=find_packages(), classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ('License :: OSI Approved :: GNU Library or Lesser ' 'General Public License (LGPL)'), 'Operating System :: MacOS', 'Operating System :: POSIX', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Topic :: Software Development :: Libraries :: Python Modules', ], )
#!/usr/bin/env python # # coding: utf-8 from setuptools import setup, find_packages long_description = open('README.md').read() setup( name='captainhook', description='A collection of git commit hooks', version='0.8.3', long_description=long_description, author='Alex Couper', author_email='info@alexcouper.com', url='https://github.com/alexcouper/captainhook', zip_safe=False, scripts=[ 'scripts/captainhook' ], install_requires=[ 'docopt==0.6.1', ], packages=find_packages(), classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', ('License :: OSI Approved :: GNU Library or Lesser ' 'General Public License (LGPL)'), 'Operating System :: MacOS', 'Operating System :: POSIX', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Topic :: Software Development :: Libraries :: Python Modules', ], )
Add tests for the 404 and 500 error handlers.
from django.core.urlresolvers import reverse from django.test import Client from django.test.client import RequestFactory from projects.models import Project from innovate import urls from innovate.views import handle404, handle500 def test_routes(): c = Client() for pattern in urls.urlpatterns: response = c.get(reverse(pattern.name)) assert response.status_code == 301 assert response.has_header('location') location = response.get('location', None) assert location is not None response = c.get(location) assert response.status_code == 200 def test_featured(): project = Project.objects.create( name=u'Test Project', slug=u'test-project', description=u'Blah', featured=True ) c = Client() response = c.get('/en-US/') assert response.status_code == 200 assert project.name in response.content def test_404_handler(): """Test that the 404 error handler renders and gives the correct code.""" response = handle404(RequestFactory().get('/not/a/real/path/')) assert response.status_code == 404 def test_500_handler(): """Test that the 500 error handler renders and gives the correct code.""" response = handle500(RequestFactory().get('/not/a/real/path/')) assert response.status_code == 500
from django.core.urlresolvers import reverse from django.test import Client from projects.models import Project from innovate import urls def test_routes(): c = Client() for pattern in urls.urlpatterns: response = c.get(reverse(pattern.name)) assert response.status_code == 301 assert response.has_header('location') location = response.get('location', None) assert location is not None response = c.get(location) assert response.status_code == 200 def test_featured(): project = Project.objects.create( name=u'Test Project', slug=u'test-project', description=u'Blah', featured=True ) c = Client() response = c.get('/en-US/') assert response.status_code == 200 assert project.name in response.content
Refactor 'one day to counter' (was actually -1, not +1)
<footer class="main-footer"> <div class="pull-right"> @if (isset($selectedDominion) && ($selectedDominion->round->isActive())) @php $diff = $selectedDominion->round->start_date->subDays(1)->diff(Carbon\Carbon::now()); $roundDurationInDays = $selectedDominion->round->start_date->diffInDays($selectedDominion->round->end_date); echo "Day <strong>{$diff->d}</strong>/{$roundDurationInDays}, hour <strong>{$diff->h}</strong>."; @endphp @endif <span class="hidden-xs">Version: </span>{!! $version !!} </div> <i class="fa fa-github"></i> <span class="hidden-xs">View this project on </span><a href="https://github.com/WaveHack/OpenDominion" target="_blank">GitHub <i class="fa fa-external-link"></i></a> </footer>
<footer class="main-footer"> <div class="pull-right"> @if (isset($selectedDominion) && ($selectedDominion->round->isActive())) @php $diff = $selectedDominion->round->start_date->diff(Carbon\Carbon::now()); $plusoneday = $diff->days + 1; $roundDurationInDays = $selectedDominion->round->start_date->diffInDays($selectedDominion->round->end_date); echo "Day <strong>{$plusoneday}</strong>/{$roundDurationInDays}, hour <strong>{$diff->h}</strong>."; @endphp @endif <span class="hidden-xs">Version: </span>{!! $version !!} </div> <i class="fa fa-github"></i> <span class="hidden-xs">View this project on </span><a href="https://github.com/WaveHack/OpenDominion" target="_blank">GitHub <i class="fa fa-external-link"></i></a> </footer>
Fix package and method names in comments
// This is a "stub" file. It's a little start on your solution. // It's not a complete solution though; you have to write some code. // Package triangle should have a package comment that summarizes what it's about. // https://golang.org/doc/effective_go.html#commentary package triangle // Notice KindFromSides() returns this type. Pick a suitable data type. type Kind const ( // Pick values for the following identifiers used by the test program. NaT // not a triangle Equ // equilateral Iso // isosceles Sca // scalene ) // KindFromSides should have a comment documenting it. func KindFromSides(a, b, c float64) Kind { // Write some code here to pass the test suite. // Then remove all the stock comments. // They're here to help you get started but they only clutter a finished solution. // If you leave them in, reviewers may protest! var k Kind return k }
// This is a "stub" file. It's a little start on your solution. // It's not a complete solution though; you have to write some code. // Package twofer should have a package comment that summarizes what it's about. // https://golang.org/doc/effective_go.html#commentary package triangle // Notice KindFromSides() returns this type. Pick a suitable data type. type Kind const ( // Pick values for the following identifiers used by the test program. NaT // not a triangle Equ // equilateral Iso // isosceles Sca // scalene ) // ShareWith should have a comment documenting it. func KindFromSides(a, b, c float64) Kind { // Write some code here to pass the test suite. // Then remove all the stock comments. // They're here to help you get started but they only clutter a finished solution. // If you leave them in, reviewers may protest! var k Kind return k }
Fix location of song info metadata
positive:

import { setMetadata } from './actions/metadata'

function _loadMetadata(store) {
  const songRequire = require.context('../songs/', true, /(info|song|package)\.json$/)
  const metaData = songRequire('./info.json')
  metaData.songs = {}

  songRequire.keys().forEach(name => {
    const parts = name.split('/')
    const filename = parts[parts.length - 1]
    if (filename === 'info.json') {
      return
    }

    const songKey = parts[1]
    metaData.songs[songKey] = metaData.songs[songKey] || {}

    if (filename === 'song.json') {
      Object.assign(metaData.songs[songKey], songRequire(name))
    } else if (filename === 'package.json') {
      const trackKey = parts[2]
      metaData.songs[songKey].tracks = metaData.songs[songKey].tracks || {}
      metaData.songs[songKey].tracks[trackKey] = songRequire(name)
    }
  })

  store.dispatch(setMetadata(metaData))
  return songRequire
}

export default function loadMetadata(store) {
  const songRequire = _loadMetadata(store)
  if (module.hot) {
    module.hot.accept(songRequire.id, () => {
      loadMetadata(store)
    })
  }
}

negative:

import { setMetadata } from './actions/metadata'

function _loadMetadata(store) {
  const songRequire = require.context('../songs/', true, /(info|song|package)\.json$/)
  const metaData = songRequire('./info.json')
  metaData.songs = {}

  songRequire.keys().forEach(name => {
    const parts = name.split('/')
    const filename = parts[parts.length - 1]
    if (filename === 'info.json') {
      return
    }

    const songKey = parts[1]
    metaData.songs[songKey] = metaData.songs[songKey] || {}

    if (filename === 'song.json') {
      metaData.songs[songKey].info = songRequire(name)
    } else if (filename === 'package.json') {
      const trackKey = parts[2]
      metaData.songs[songKey].tracks = metaData.songs[songKey].tracks || {}
      metaData.songs[songKey].tracks[trackKey] = songRequire(name)
    }
  })

  store.dispatch(setMetadata(metaData))
  return songRequire
}

export default function loadMetadata(store) {
  const songRequire = _loadMetadata(store)
  if (module.hot) {
    module.hot.accept(songRequire.id, () => {
      loadMetadata(store)
    })
  }
}

Add support for hooking retryablehttp request logs
positive:

package azure

import (
    "io/ioutil"
    "log"
    "github.com/hashicorp/go-retryablehttp"
    "net/http"
)

type Client struct {
    logger *log.Logger

    BaseURL        string
    subscriptionID string

    tokenRequester *tokenRequester
    httpClient     *retryablehttp.Client
}

func NewClient(creds *AzureResourceManagerCredentials) (*Client, error) {
    defaultLogger := log.New(ioutil.Discard, "", 0)
    httpClient := retryablehttp.NewClient()
    httpClient.Logger = defaultLogger
    tr := newTokenRequester(httpClient, creds.ClientID, creds.ClientSecret, creds.TenantID)

    return &Client{
        BaseURL:        "https://management.azure.com",
        subscriptionID: creds.SubscriptionID,
        httpClient:     httpClient,
        tokenRequester: tr,
        logger:         defaultLogger,
    }, nil
}

func (c *Client) SetRequestLoggingHook(hook func(*log.Logger, *http.Request, int)) {
    c.httpClient.RequestLogHook = hook
}

func (c *Client) SetLogger(newLogger *log.Logger) {
    c.logger = newLogger
    c.httpClient.Logger = newLogger
}

func (c *Client) NewRequest() *Request {
    return &Request{
        client: c,
    }
}

func (c *Client) NewRequestForURI(resourceURI string) *Request {
    return &Request{
        URI:    &resourceURI,
        client: c,
    }
}

negative:

package azure

import (
    "io/ioutil"
    "log"
    "github.com/hashicorp/go-retryablehttp"
)

type Client struct {
    logger *log.Logger

    BaseURL        string
    subscriptionID string

    tokenRequester *tokenRequester
    httpClient     *retryablehttp.Client
}

func NewClient(creds *AzureResourceManagerCredentials) (*Client, error) {
    defaultLogger := log.New(ioutil.Discard, "", 0)
    httpClient := retryablehttp.NewClient()
    httpClient.Logger = defaultLogger
    tr := newTokenRequester(httpClient, creds.ClientID, creds.ClientSecret, creds.TenantID)

    return &Client{
        BaseURL:        "https://management.azure.com",
        subscriptionID: creds.SubscriptionID,
        httpClient:     httpClient,
        tokenRequester: tr,
        logger:         defaultLogger,
    }, nil
}

func (c *Client) SetLogger(newLogger *log.Logger) {
    c.logger = newLogger
    c.httpClient.Logger = newLogger
}

func (c *Client) NewRequest() *Request {
    return &Request{
        client: c,
    }
}

func (c *Client) NewRequestForURI(resourceURI string) *Request {
    return &Request{
        URI:    &resourceURI,
        client: c,
    }
}

Change ember app prefix to 'share/'
"""share URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.contrib import admin from django.conf.urls import url, include from django.conf import settings from django.views.generic.base import RedirectView from revproxy.views import ProxyView urlpatterns = [ url(r'^admin/', admin.site.urls), # url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')), url(r'^api/', include('api.urls', namespace='api')), url(r'^o/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^accounts/', include('allauth.urls')), url(r'^(?P<path>share/.*)$', ProxyView.as_view(upstream=settings.EMBER_SHARE_URL)), url(r'^$', RedirectView.as_view(url='share/')), ]
"""share URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.contrib import admin from django.conf.urls import url, include from django.conf import settings from django.views.generic.base import RedirectView from revproxy.views import ProxyView urlpatterns = [ url(r'^admin/', admin.site.urls), # url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')), url(r'^api/', include('api.urls', namespace='api')), url(r'^o/', include('oauth2_provider.urls', namespace='oauth2_provider')), url(r'^accounts/', include('allauth.urls')), url(r'^(?P<path>app/.*)$', ProxyView.as_view(upstream=settings.EMBER_SHARE_URL)), url(r'^$', RedirectView.as_view(url='app/discover')), ]
Make sure you can not instantiate class
positive:

package org.realityforge.replicant.shared.transport;

public final class ReplicantContext {
    /**
     * Key used to retrieve an opaque identifier for the session from the ReplicantContextHolder.
     * Used to pass data from the servlet to the EJB.
     */
    public static final String SESSION_ID_KEY = "SessionID";

    /**
     * Key used to retrieve an opaque identifier for the request from the ReplicantContextHolder.
     * Used to pass data from the servlet to the EJB.
     */
    public static final String REQUEST_ID_KEY = "RequestID";

    /**
     * Key used to retrieve a flag whether the request produced a changeset relevant for the initiating session..
     * Used to pass data from the EJB to the servlet.
     */
    public static final String REQUEST_COMPLETE_KEY = "RequestComplete";

    /**
     * HTTP request header to indicate the session id.
     */
    public static final String SESSION_ID_HEADER = "X-GWT-SessionID";

    /**
     * HTTP request header to indicate the request id.
     */
    public static final String REQUEST_ID_HEADER = "X-GWT-RequestID";

    /**
     * HTTP response header to indicate the whether the request is complete or a change set is expected.
     */
    public static final String REQUEST_COMPLETE_HEADER = "X-GWT-RequestComplete";

    private ReplicantContext() {
    }
}

negative:

package org.realityforge.replicant.shared.transport;

public class ReplicantContext {
    /**
     * Key used to retrieve an opaque identifier for the session from the ReplicantContextHolder.
     * Used to pass data from the servlet to the EJB.
     */
    public static final String SESSION_ID_KEY = "SessionID";

    /**
     * Key used to retrieve an opaque identifier for the request from the ReplicantContextHolder.
     * Used to pass data from the servlet to the EJB.
     */
    public static final String REQUEST_ID_KEY = "RequestID";

    /**
     * Key used to retrieve a flag whether the request produced a changeset relevant for the initiating session..
     * Used to pass data from the EJB to the servlet.
     */
    public static final String REQUEST_COMPLETE_KEY = "RequestComplete";

    /**
     * HTTP request header to indicate the session id.
     */
    public static final String SESSION_ID_HEADER = "X-GWT-SessionID";

    /**
     * HTTP request header to indicate the request id.
     */
    public static final String REQUEST_ID_HEADER = "X-GWT-RequestID";

    /**
     * HTTP response header to indicate the whether the request is complete or a change set is expected.
     */
    public static final String REQUEST_COMPLETE_HEADER = "X-GWT-RequestComplete";
}

Add form (forgot at commit before)
<div class="single"> <div class="panel warp"> <?php echo formOpen('user/signin'); ?> <h1>Sign In</h1> <div> <?php echo inputText('username', 'Username'); ?> </div> <div> <?php echo inputPassword('password', 'Password'); ?> </div> <div> <?php echo inputCheckbox('keep', 'Remember Me'); ?> </div> <div> <?php echo inputSubmit('Sign In'); ?> </div> <div> Don't have an account yet? <a href="<?php echo baseUrl(); ?>user/signup">Create an Account</a> </div> <?php echo formClose(); ?> </div> </div>
<div class="single"> <div class="panel warp"> <h1>Sign In</h1> <div> <?php echo inputText('username', 'Username'); ?> </div> <div> <?php echo inputPassword('password', 'Password'); ?> </div> <div> <?php echo inputCheckbox('keep', 'Remember Me'); ?> </div> <div> <?php echo inputSubmit('Sign In'); ?> </div> <div> Don't have an account yet? <a href="<?php echo baseUrl(); ?>user/signup">Create an Account</a> </div> </div> </div>
Add a separate endpoint for posting postcode lookups to
positive:

from django.conf.urls import patterns, include, url
from django.contrib import admin

from candidates.views import (ConstituencyPostcodeFinderView,
                              ConstituencyDetailView,
                              CandidacyView,
                              CandidacyDeleteView,
                              NewPersonView)

admin.autodiscover()

urlpatterns = patterns('',
    url(r'^$', ConstituencyPostcodeFinderView.as_view(), name='finder'),
    url(r'^lookup/postcode$', ConstituencyPostcodeFinderView.as_view(), name='lookup-postcode'),
    url(r'^constituency/(?P<constituency_name>.*)$', ConstituencyDetailView.as_view(), name='constituency'),
    url(r'^candidacy$', CandidacyView.as_view(), name='candidacy-create'),
    url(r'^candidacy/delete$', CandidacyDeleteView.as_view(), name='candidacy-delete'),
    url(r'^person$', NewPersonView.as_view(), name='person-create'),
    url(r'^admin/', include(admin.site.urls)),
)

negative:

from django.conf.urls import patterns, include, url
from django.contrib import admin

from candidates.views import (ConstituencyPostcodeFinderView,
                              ConstituencyDetailView,
                              CandidacyView,
                              CandidacyDeleteView,
                              NewPersonView)

admin.autodiscover()

urlpatterns = patterns('',
    url(r'^$', ConstituencyPostcodeFinderView.as_view(), name='finder'),
    url(r'^constituency/(?P<constituency_name>.*)$', ConstituencyDetailView.as_view(), name='constituency'),
    url(r'^candidacy$', CandidacyView.as_view(), name='candidacy-create'),
    url(r'^candidacy/delete$', CandidacyDeleteView.as_view(), name='candidacy-delete'),
    url(r'^person$', NewPersonView.as_view(), name='person-create'),
    url(r'^admin/', include(admin.site.urls)),
)

Modify carousel interval to 7500ms
positive:

var main = function() {
    $(".btn-projects").click(function() {
        $('html,body').animate({ scrollTop: $(".proj").offset().top }, 'slow');
    });

    $(".btn-about").click(function() {
        $('html,body').animate({ scrollTop: $(".bio").offset().top }, 'slow');
    });

    $('#myCarousel').carousel({ interval: 7500 });

    $('.carousel .item').each(function() {
        var next = $(this).next();
        if (!next.length) {
            next = $(this).siblings(':first');
        }
        next.children(':first-child').clone().appendTo($(this));

        if (next.next().length > 0) {
            next.next().children(':first-child').clone().appendTo($(this));
        } else {
            $(this).siblings(':first').children(':first-child').clone().appendTo($(this));
        }
    });
};

$(document).ready(main);

negative:

var main = function() {
    $(".btn-projects").click(function() {
        $('html,body').animate({ scrollTop: $(".proj").offset().top }, 'slow');
    });

    $(".btn-about").click(function() {
        $('html,body').animate({ scrollTop: $(".bio").offset().top }, 'slow');
    });

    $('#myCarousel').carousel({ interval: 10000 });

    $('.carousel .item').each(function() {
        var next = $(this).next();
        if (!next.length) {
            next = $(this).siblings(':first');
        }
        next.children(':first-child').clone().appendTo($(this));

        if (next.next().length > 0) {
            next.next().children(':first-child').clone().appendTo($(this));
        } else {
            $(this).siblings(':first').children(':first-child').clone().appendTo($(this));
        }
    });
};

$(document).ready(main);

:muscle: Add code for Map() instead of Array()
positive:

// Copyright (c) 2017 The Regents of the University of Michigan.
// All Rights Reserved. Licensed according to the terms of the Revised
// BSD License. See LICENSE.txt for details.

module.exports = function(landscape) {
  let actions = new Map();
  let tickCount = 0;
  let thingToDo = [];

  return {
    "tick": () => new Promise(function(resolve, reject) {
      try {
        tickCount += 1;

        if (thingToDo.length > 0) {
          let thing = thingToDo.pop();
          landscape[thing.method].apply(landscape, thing.args);
        }

        resolve();
      } catch(error) {
        reject(error);
      }
    }),

    "at": function(when, method) {
      thingToDo.push({
        "when": when,
        "method": method,
        "args": [...arguments].slice(2)
      });

      actions.set(when, {
        "when": when,
        "method": method,
        "args": [...arguments].slice(2)
      });
    }
  };
};

negative:

// Copyright (c) 2017 The Regents of the University of Michigan.
// All Rights Reserved. Licensed according to the terms of the Revised
// BSD License. See LICENSE.txt for details.

module.exports = function(landscape) {
  let thingToDo = [];

  return {
    "tick": () => new Promise(function(resolve, reject) {
      try {
        if (thingToDo.length > 0) {
          let thing = thingToDo.pop();
          landscape[thing.method].apply(landscape, thing.args);
        }

        resolve();
      } catch(error) {
        reject(error);
      }
    }),

    "at": function(when, method) {
      thingToDo.push({
        "when": when,
        "method": method,
        "args": [...arguments].slice(2)
      });
    }
  };
};

Test with even smaller files
positive:

import os
from os.path import dirname, join
import uuid

import qiniu.conf
import qiniu.io
import qiniu.rs
import qiniu.rsf

QINIU_ACCESS_KEY = os.environ.get('QINIU_ACCESS_KEY')
QINIU_SECRET_KEY = os.environ.get('QINIU_SECRET_KEY')
QINIU_BUCKET_NAME = os.environ.get('QINIU_BUCKET_NAME')
QINIU_BUCKET_DOMAIN = os.environ.get('QINIU_BUCKET_DOMAIN')

qiniu.conf.ACCESS_KEY = QINIU_ACCESS_KEY
qiniu.conf.SECRET_KEY = QINIU_SECRET_KEY

QINIU_PUT_POLICY= qiniu.rs.PutPolicy(QINIU_BUCKET_NAME)


def test_put_file():
    ASSET_FILE_NAME = 'jquery-1.11.1.min.js'
    with open(join(dirname(__file__),'assets', ASSET_FILE_NAME), 'rb') as assset_file:
        text = assset_file.read()
        text = text[:len(text)/10]
        print "Test text: %s" % text
        token = QINIU_PUT_POLICY.token()
        ret, err = qiniu.io.put(token, join(str(uuid.uuid4()), ASSET_FILE_NAME), text)
        if err:
            raise IOError( "Error message: %s" % err)

negative:

import os
from os.path import dirname, join
import uuid

import qiniu.conf
import qiniu.io
import qiniu.rs
import qiniu.rsf

QINIU_ACCESS_KEY = os.environ.get('QINIU_ACCESS_KEY')
QINIU_SECRET_KEY = os.environ.get('QINIU_SECRET_KEY')
QINIU_BUCKET_NAME = os.environ.get('QINIU_BUCKET_NAME')
QINIU_BUCKET_DOMAIN = os.environ.get('QINIU_BUCKET_DOMAIN')

qiniu.conf.ACCESS_KEY = QINIU_ACCESS_KEY
qiniu.conf.SECRET_KEY = QINIU_SECRET_KEY

QINIU_PUT_POLICY= qiniu.rs.PutPolicy(QINIU_BUCKET_NAME)


def test_put_file():
    ASSET_FILE_NAME = 'jquery-1.11.1.min.js'
    with open(join(dirname(__file__),'assets', ASSET_FILE_NAME), 'rb') as assset_file:
        text = assset_file.read()
        print "Test text: %s" % text
        token = QINIU_PUT_POLICY.token()
        ret, err = qiniu.io.put(token, join(str(uuid.uuid4()), ASSET_FILE_NAME), text)
        if err:
            raise IOError( "Error message: %s" % err)
