Columns: code (string, lengths 46–37.2k), language (string, 9 classes), AST_depth (int64, 3–30), alphanumeric_fraction (float64, 0.2–0.91), max_line_length (int64, 13–399), avg_line_length (float64, 5.67–140), num_lines (int64, 7–299), original_docstring (string, lengths 22–42.6k), source (string, 2 classes)

| code | language | AST_depth | alphanumeric_fraction | max_line_length | avg_line_length | num_lines | original_docstring | source |
|---|---|---|---|---|---|---|---|---|
public void onSave(Map<String,String> specifications) {
errorsLiveData.setValue(Objects.requireNonNull(deviceModelLiveData.getValue()).getData().isValid());
if (saveShipment.getValue() != null && errorsLiveData.getValue() != null && errorsLiveData.getValue().size() == 0) {
Shipment shipment = saveShipment.getValue();
shipment.setDeviceName(deviceModelLiveData.getValue().getData().getName());
shipment.setQuantity(deviceModelLiveData.getValue().getData().getProductions().get(0).getQuantity());
errorsLiveData.setValue(saveShipment.getValue().isValid());
}
if (errorsLiveData.getValue() != null && errorsLiveData.getValue().size() == 0) {
DeviceModel device = deviceModelLiveData.getValue().getData();
device.setSpecifications(specifications);
saveDeviceLiveData.setValue(device);
}
}
|
java
| 14
| 0.669584
| 124
| 64.357143
| 14
|
/**
 * Initiates the save process by handling validation and then sends off the request through
 * LiveData transformation
*/
|
function
|
public MarketDepth Update(IEnumerable<Quote> quotes, DateTimeOffset lastChangeTime = default(DateTimeOffset))
{
if (quotes == null)
throw new ArgumentNullException(nameof(quotes));
var bids = Enumerable.Empty<Quote>();
var asks = Enumerable.Empty<Quote>();
foreach (var group in quotes.GroupBy(q => q.OrderDirection))
{
if (group.Key == Sides.Buy)
bids = group;
else
asks = group;
}
return Update(bids, asks, false, lastChangeTime);
}
|
c#
| 12
| 0.677755
| 109
| 31.133333
| 15
|
/// <summary>
/// Update the order book by new quotes.
/// </summary>
/// <param name="quotes">The new quotes.</param>
/// <param name="lastChangeTime">Last change time.</param>
/// <returns>Market depth.</returns>
/// <remarks>
/// The old quotes will be removed from the book.
/// </remarks>
|
function
|
def open_by_address(addr, csv_dir = None, csv_folder = 'tester',
instr_class = 'TestInstrument', cmd_name = 'commands.csv',
lookup_name = 'lookup.csv'):
configs = {}
configs['csv_directory'] = csv_dir
configs['cmd_name'] = cmd_name
configs['lookup_name'] = lookup_name
cmd_map = os.path.join(configs['csv_directory'],
csv_folder,
configs['cmd_name'])
lookup_file = os.path.join(configs['csv_directory'],
csv_folder,
configs['lookup_name'])
cmd_list, inst_comm, unconnected = init_instrument(
cmd_map, addr=addr, lookup=lookup_file)
InstrumentClass = getattr(instruments, instr_class)
name = 'tester'
return InstrumentClass(cmd_list, inst_comm, name, unconnected)
|
python
| 9
| 0.555172
| 79
| 47.388889
| 18
|
Open an instrument by address and optionally use the system config file
Parameters
----------
addr : dict
The address of the instrument as a dict; e.g. {'pyvisa': 'USB0::0x0957::0x17A9::MY52160418::INSTR'}
csv_dir : str
Base directory to the csv instrument command input files
csv_folder : str
Folder for the commands.csv and lookup.csv files
instr_class : str
The name of the class in instruments.py
cmd_name : str
    The name of the csv file with commands
lookup_name : str
    The name of the csv file with a lookup map
Returns
-------
An instrument object

Typical usage (for pytests):
    t = open_by_address({'no_interface': 'no_address'})
|
function
|
public abstract class Generator<T> implements
IGenerator<ICombinatoricsVector<T>> {
/**
* Creates an iterator for enumerating all generated objects/vectors
*
* @return The iterator over the generated objects/vectors
 * @deprecated This method will be removed in the near future. Use the method
* <code>iterator()</code> instead of this method
*/
@Deprecated
public Iterator<ICombinatoricsVector<T>> createIterator() {
return iterator();
}
/**
* Returns all generated vectors as a list
*
* @return List of all generated objects/vectors
*/
public List<ICombinatoricsVector<T>> generateAllObjects() {
return generateFilteredObjects(null);
}
/**
* Returns the generated vectors filtered by a filter
*
* @param filter
* The filter to be applied to the generated result
* @return The list of the filtered vectors
*/
public List<ICombinatoricsVector<T>> generateFilteredObjects(
IFilter<ICombinatoricsVector<T>> filter) {
List<ICombinatoricsVector<T>> list = new ArrayList<ICombinatoricsVector<T>>();
long index = 0;
for (ICombinatoricsVector<T> vector : this) {
if (filter == null || filter.accepted(index, vector))
list.add(vector);
index++;
}
return list;
}
/**
* Returns vectors as a list for specified range of indexes (from the
* <code>startIndex</code> to <code>stopIndex</code>)
*
* @return List of the generated objects/vectors
*/
public List<ICombinatoricsVector<T>> generateObjectsRange(int startIndex,
int stopIndex) {
assert (startIndex <= stopIndex);
List<ICombinatoricsVector<T>> list = new ArrayList<ICombinatoricsVector<T>>();
Iterator<ICombinatoricsVector<T>> iterator = this.iterator();
int index = 1;
while (iterator.hasNext()) {
if (index >= startIndex && index <= stopIndex) {
list.add(iterator.next());
} else if (index > stopIndex) {
return list;
} else {
iterator.next();
}
index++;
}
return list;
}
}
|
java
| 14
| 0.688608
| 80
| 27.637681
| 69
|
/**
* Abstract base class for all generic generators of the library
* <p>
 * This is a library written in Java to resolve some combinatorics issues such
 * as generating combinatorial objects (permutations, partitions, compositions,
 * subsets, combinations, etc.).
* <p>
* Type of the items should be specified as a parameter of generators and
* vectors.
* <p>
 * There is a general pattern for how to use the generators <blockquote>
*
* <pre>
* // create the initial vector or set
* ICombinatoricsVector<T> vector = Factory.createVector(new <T>[]{ elements } );
*
* // create a concrete generator
* Generator<T> generator = Factory.create<Concrete>Generator(vector);
*
* // iterate the generated objects
* for (ICombinatoricsVector<T> v : generator) {
* System.out.println( v );
* }
* </pre>
*
* </blockquote>
* <p>
*
* @author Dmytro Paukov
* @see ICombinatoricsVector
* @see Iterator
* @see Factory
* @version 2.0
* @param <T>
* Type of the elements in the generated vectors
*/
|
class
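Concretizing the usage pattern from the Javadoc above with permutations. A minimal sketch; the Factory method names follow combinatoricslib's published API and should be treated as assumptions if your version differs:

```java
import org.paukov.combinatorics.Factory;
import org.paukov.combinatorics.Generator;
import org.paukov.combinatorics.ICombinatoricsVector;

public class PermutationDemo {
    public static void main(String[] args) {
        // Create the initial vector of elements to permute.
        ICombinatoricsVector<String> vector =
                Factory.createVector(new String[] { "a", "b", "c" });
        // Create a concrete generator: all 3! = 6 permutations.
        Generator<String> generator = Factory.createPermutationGenerator(vector);
        // Iterate the generated objects, as in the pattern above.
        for (ICombinatoricsVector<String> v : generator) {
            System.out.println(v);
        }
    }
}
```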
|
func updateCacheWithCurrentCluster(cache map[types.ID]*memberHealth, current []*membership.Member) map[types.ID]struct{} {
currentMemberMap := make(map[types.ID]struct{}, len(current))
for _, member := range current {
	if member == nil {
		continue
	}
	currentMemberMap[member.ID] = struct{}{}
var healthStats *memberHealth
var ok bool
healthStats, ok = cache[member.ID]
if !ok {
healthStats = &memberHealth{
Discovered: time.Now(),
LastHealthy: time.Now(),
ClientURLs: []string{},
}
}
healthStats.Name = member.Name
if member.ClientURLs != nil {
healthStats.ClientURLs = member.ClientURLs[:]
}
cache[member.ID] = healthStats
}
return currentMemberMap
}
|
go
| 18
| 0.673759
| 122
| 28.416667
| 24
|
// updateCacheWithCurrentCluster will go through and insert current cluster members into the cache
// and update the client urls of existing cache members
|
function
|
public class Request<I> {
private I input;
private Object tag;
private String origin;
private Map<String, Object> extras;
private Request(Builder<I> builder) {
input = builder.input;
tag = builder.tag;
extras = builder.extras;
}
public I getInput() {
return input;
}
public Object getTag() {
return tag;
}
public Map<String, Object> getExtras() {
return extras;
}
/**
* Set the originator of the {@link Request}.
* Useful for logging.
*
* @param origin Originator
* @return This request
*/
Request<I> origin(String origin) {
this.origin = origin;
return this;
}
public String getOrigin() {
return origin;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Request<?> request = (Request<?>) o;
return Objects.equals(input, request.input) &&
Objects.equals(tag, request.tag) &&
Objects.equals(origin, request.origin) &&
Objects.equals(extras, request.extras);
}
@Override
public int hashCode() {
return Objects.hash(input, tag, origin, extras);
}
@Override
public String toString() {
return "Request{" +
"input=" + input +
", tag=" + tag +
", origin='" + origin + '\'' +
", extras=" + extras +
'}';
}
/**
* Create a new {@link Builder} instance for building and configuring
* the {@link Request} object.
*
* @param input Input object
* @param <I> Input type
* @return Builder instance for creating the {@link Request}
*/
public static <I> Builder<I> newBuilder(I input) {
return new Builder<>(input);
}
/**
* Create a new {@link Builder} instance for building and configuring
* the {@link Request} object.
* The {@link Request#input} object will be set to null.
* Useful for use-cases which do not require any input.
*
* @param <I> Input type
* @return Builder instance for creating the {@link Request}
*/
public static <I> Builder<I> newBuilder() {
return new Builder<>(null);
}
/**
* Builder pattern for creating and configuring {@link Request} objects.
*
* @param <I> Input type
*/
public static class Builder<I> {
private I input;
private Object tag;
private Map<String, Object> extras;
Builder(I input) {
this.input = input;
}
/**
* Tag the {@link Request} with the passed object.
* Useful for better differentiation.
*
* @param tag Tag
* @return This builder
*/
public Builder<I> tag(Object tag) {
this.tag = tag;
return this;
}
/**
* Set the {@link Request} extra parameters.
*
* @param extras Extra parameters
* @return This builder
*/
public Builder<I> extras(Map<String, Object> extras) {
this.extras = extras;
return this;
}
/**
* Add an extra parameter to the built {@link Request}
*
* @param key Parameter key
* @param value Parameter value
* @return This builder
*/
public Builder<I> extra(String key, Object value) {
if (extras == null) {
extras = new HashMap<>();
}
extras.put(key, value);
return this;
}
/**
* Build the {@link Request} configured with data
* passed in the builder methods.
*
* @return Configured {@link Request} object
*/
public Request<I> build() {
return new Request<>(this);
}
}
}
|
java
| 17
| 0.514145
| 76
| 24.734177
| 158
|
/**
* Data holder, acting as a wrapper of the input object.
* Instances of this class are passed to use-case classes as inputs
* for the action/logic/mechanism they wrap/define.
* Additionally this class exposes ways of tagging and setting
* extra parameters to requests which can be used by use-cases
* and their decorators.
*
* @param <I> Input type
*/
|
class
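The builder shown above makes request construction fluent. A minimal usage sketch based only on the API in this row (note that origin(String) is package-private, so it is not exercised here):

```java
public class RequestDemo {
    public static void main(String[] args) {
        // Build a request with a tag and one ad-hoc extra parameter.
        Request<String> request = Request.newBuilder("user-query")
                .tag("analytics")          // differentiates this request in logs
                .extra("timeoutMs", 5000)  // stored in the extras map
                .build();
        // toString() prints the input, tag, origin, and extras.
        System.out.println(request);
    }
}
```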
|
public class ReduceMemoryFootprintTest {
private static final Logger LOG = LoggerFactory.getLogger(ReduceMemoryFootprintTest.class);
private static final String TEST_AMQ_BROKER_URI = "tcp://localhost:0";
private static final String TEST_QUEUE_NAME = "Reduce.Memory.Footprint.Test";
private static final String PROP_NAME = "prop_name";
private static final String PROP_VALUE = "test-value";
private String connectionURI;
private BrokerService broker;
@Before
public void setUp() throws Exception {
// create a broker
broker = createBroker();
broker.start();
broker.waitUntilStarted();
connectionURI = broker.getTransportConnectorByName("openwire").getPublishableConnectString();
}
@After
public void tearDown() throws Exception {
broker.stop();
broker.waitUntilStopped();
}
@Test(timeout = 60000)
public void testPropertyLostNonScheduled() throws Exception {
ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(connectionURI);
Connection connection = connectionFactory.createConnection();
Session session = connection.createSession(true, Session.SESSION_TRANSACTED);
MessageProducer producer = session.createProducer(new ActiveMQQueue(TEST_QUEUE_NAME));
connection.start();
String messageText = createMessageText();
ActiveMQTextMessage message = new ActiveMQTextMessage();
// Try with non-scheduled
message.setStringProperty(PROP_NAME, PROP_VALUE);
message.setText(messageText);
producer.send(message);
session.commit();
LOG.info("Attempting to receive non-scheduled message");
Message receivedMessage = consumeMessages(connection);
assertNotNull(receivedMessage);
assertEquals("property should match", PROP_VALUE, receivedMessage.getStringProperty(PROP_NAME));
connection.close();
}
@Test(timeout = 60000)
public void testPropertyLostScheduled() throws Exception {
ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(connectionURI);
Connection connection = connectionFactory.createConnection();
Session session = connection.createSession(true, Session.SESSION_TRANSACTED);
MessageProducer producer = session.createProducer(new ActiveMQQueue(TEST_QUEUE_NAME));
connection.start();
String messageText = createMessageText();
ActiveMQTextMessage message = new ActiveMQTextMessage();
// Try with scheduled
message.setStringProperty(PROP_NAME, PROP_VALUE);
message.setLongProperty(ScheduledMessage.AMQ_SCHEDULED_DELAY, 1000);
message.setText(messageText);
producer.send(message);
session.commit();
LOG.info("Attempting to receive scheduled message");
Message receivedMessage = consumeMessages(connection);
assertNotNull(receivedMessage);
assertEquals("property should match", PROP_VALUE, receivedMessage.getStringProperty(PROP_NAME));
connection.close();
}
private String createMessageText() {
StringBuffer buffer = new StringBuffer();
for (int i = 0; i < 50; i++) {
buffer.append("1234567890");
}
return buffer.toString();
}
private Message consumeMessages(Connection connection) {
Message message = null;
try {
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
MessageConsumer consumer = session.createConsumer(new ActiveMQQueue(TEST_QUEUE_NAME));
message = consumer.receive(45000);
} catch (Exception ex) {
fail("during consume message received exception " + ex.getMessage());
} finally {
}
return message;
}
private BrokerService createBroker() throws Exception {
BrokerService broker = new BrokerService();
// add the policy entries ~
PolicyMap policyMap = new PolicyMap();
List<PolicyEntry> entries = new ArrayList<PolicyEntry>();
PolicyEntry pe = new PolicyEntry();
// reduce memory footprint
pe.setReduceMemoryFootprint(true);
pe.setOptimizedDispatch(true);
pe.setQueue(">");
entries.add(pe);
policyMap.setPolicyEntries(entries);
broker.setDestinationPolicy(policyMap);
broker.deleteAllMessages();
broker.setSchedulerSupport(true);
broker.addConnector(TEST_AMQ_BROKER_URI).setName("openwire");
return broker;
}
}
|
java
| 14
| 0.674388
| 104
| 31.535211
| 142
|
/**
* Using the broker's scheduler and setting reduceMemoryFootprint="true" causes
* message properties to be lost.
*/
|
class
|
def copy_fragment_sizes_files(args):
fragment_sizes_files = [
(args.standard_waltz_pool_a, 'Standard_A'),
(args.unfiltered_waltz_pool_a, 'Unfiltered_A'),
(args.simplex_waltz_pool_a, 'Simplex_A'),
(args.duplex_waltz_pool_a, 'Duplex_A'),
(args.standard_waltz_pool_b, 'Standard_B'),
(args.unfiltered_waltz_pool_b, 'Unfiltered_B'),
(args.simplex_waltz_pool_b, 'Simplex_B'),
(args.duplex_waltz_pool_b, 'Duplex_B'),
]
fragment_sizes_files = [(outname, x[0], x[1]) for outname, x in zip(INSERT_SIZE_OUTPUT_FILE_NAMES, fragment_sizes_files)]
for dst, src, type in fragment_sizes_files:
frag_sizes_path = os.path.join(src, 'fragment-sizes.txt')
fragment_sizes_df = pd.read_csv(frag_sizes_path, sep='\t')
fragment_sizes_df = fragment_sizes_df[['FragmentSize', 'TotalFrequency', SAMPLE_ID_COLUMN]]
fragment_sizes_df = fragment_sizes_df.pivot('FragmentSize', SAMPLE_ID_COLUMN, 'TotalFrequency')
new_index = pd.Index(np.arange(1, 800), name='FragmentSize')
fragment_sizes_df = fragment_sizes_df.reindex(new_index).reset_index()
fragment_sizes_df = fragment_sizes_df.fillna(0)
to_csv(fragment_sizes_df,os.path.join('.', dst))
|
python
| 12
| 0.626843
| 125
| 60.428571
| 21
|
Copy the fragment-sizes.txt files from the Waltz output folders, and create a combined table for all bam types
Fragment Sizes graph comes from Unfiltered Bam, Pool A Targets
Todo: not clean
:param args:
:return:
|
function
|
QualType Sema::BuildArrayType(QualType T, ArrayType::ArraySizeModifier ASM,
Expr *ArraySize, unsigned Quals,
SourceRange Brackets, DeclarationName Entity) {
SourceLocation Loc = Brackets.getBegin();
if (getLangOptions().CPlusPlus) {
    // Note: function types are handled in the common path with C.
if (T->isReferenceType()) {
Diag(Loc, diag::err_illegal_decl_array_of_references)
<< getPrintableNameForEntity(Entity) << T;
return QualType();
}
if (T->isVoidType()) {
Diag(Loc, diag::err_illegal_decl_array_incomplete_type) << T;
return QualType();
}
if (RequireNonAbstractType(Brackets.getBegin(), T,
diag::err_array_of_abstract_type))
return QualType();
} else {
    // C99 6.7.5.2p1: If the element type is an incomplete or function type,
    // reject it (e.g. void ary[7], struct foo ary[7], void ary[7]())
if (RequireCompleteType(Loc, T,
diag::err_illegal_decl_array_incomplete_type))
return QualType();
}
if (T->isFunctionType()) {
Diag(Loc, diag::err_illegal_decl_array_of_functions)
<< getPrintableNameForEntity(Entity) << T;
return QualType();
}
if (T->getContainedAutoType()) {
Diag(Loc, diag::err_illegal_decl_array_of_auto)
<< getPrintableNameForEntity(Entity) << T;
return QualType();
}
if (const RecordType *EltTy = T->getAs<RecordType>()) {
    // If the element type is a struct or union that contains a variadic
    // array, accept it as a GNU extension: C99 6.7.2.1p2.
if (EltTy->getDecl()->hasFlexibleArrayMember())
Diag(Loc, diag::ext_flexible_array_in_array) << T;
} else if (T->isObjCObjectType()) {
Diag(Loc, diag::err_objc_array_of_interfaces) << T;
return QualType();
}
  // Do lvalue-to-rvalue conversions on the array size expression.
if (ArraySize && !ArraySize->isRValue())
DefaultLvalueConversion(ArraySize);
  // C99 6.7.5.2p1: The size expression shall have integer type.
  // TODO: in theory, if we were insane, we could allow contextual
  // conversions to integer type here.
if (ArraySize && !ArraySize->isTypeDependent() &&
!ArraySize->getType()->isIntegralOrUnscopedEnumerationType()) {
Diag(ArraySize->getLocStart(), diag::err_array_size_non_int)
<< ArraySize->getType() << ArraySize->getSourceRange();
return QualType();
}
llvm::APSInt ConstVal(Context.getTypeSize(Context.getSizeType()));
if (!ArraySize) {
if (ASM == ArrayType::Star)
T = Context.getVariableArrayType(T, 0, ASM, Quals, Brackets);
else
T = Context.getIncompleteArrayType(T, ASM, Quals);
} else if (ArraySize->isTypeDependent() || ArraySize->isValueDependent()) {
T = Context.getDependentSizedArrayType(T, ArraySize, ASM, Quals, Brackets);
} else if (!ArraySize->isIntegerConstantExpr(ConstVal, Context) ||
(!T->isDependentType() && !T->isIncompleteType() &&
!T->isConstantSizeType())) {
    // Per C99, a variable array is an array with either a non-constant
    // size or an element type that has a non-constant-size
T = Context.getVariableArrayType(T, ArraySize, ASM, Quals, Brackets);
} else {
    // C99 6.7.5.2p1: If the expression is a constant expression, it shall
    // have a value greater than zero.
if (ConstVal.isSigned() && ConstVal.isNegative()) {
if (Entity)
Diag(ArraySize->getLocStart(), diag::err_decl_negative_array_size)
<< getPrintableNameForEntity(Entity) << ArraySize->getSourceRange();
else
Diag(ArraySize->getLocStart(), diag::err_typecheck_negative_array_size)
<< ArraySize->getSourceRange();
return QualType();
}
if (ConstVal == 0) {
      // GCC accepts zero sized static arrays. We allow them when
      // we're not in a SFINAE context.
Diag(ArraySize->getLocStart(),
isSFINAEContext()? diag::err_typecheck_zero_array_size
: diag::ext_typecheck_zero_array_size)
<< ArraySize->getSourceRange();
} else if (!T->isDependentType() && !T->isVariablyModifiedType() &&
!T->isIncompleteType()) {
      // Is the array too large?
unsigned ActiveSizeBits
= ConstantArrayType::getNumAddressingBits(Context, T, ConstVal);
if (ActiveSizeBits > ConstantArrayType::getMaxSizeBits(Context))
Diag(ArraySize->getLocStart(), diag::err_array_too_large)
<< ConstVal.toString(10)
<< ArraySize->getSourceRange();
}
T = Context.getConstantArrayType(T, ConstVal, ASM, Quals);
}
  // If this is not C99, extwarn about VLA's and C99 array size modifiers.
if (!getLangOptions().C99) {
if (T->isVariableArrayType()) {
      // Prohibit the use of non-POD types in VLAs.
if (!T->isDependentType() &&
!Context.getBaseElementType(T)->isPODType()) {
Diag(Loc, diag::err_vla_non_pod)
<< Context.getBaseElementType(T);
return QualType();
}
      // Prohibit the use of VLAs during template argument deduction.
else if (isSFINAEContext()) {
Diag(Loc, diag::err_vla_in_sfinae);
return QualType();
}
      // Just extwarn about VLAs.
else
Diag(Loc, diag::ext_vla);
} else if (ASM != ArrayType::Normal || Quals != 0)
Diag(Loc,
getLangOptions().CPlusPlus? diag::err_c99_array_usage_cxx
: diag::ext_c99_array_usage);
}
return T;
}
|
c++
| 22
| 0.631541
| 79
| 42.690476
| 126
|
/// \brief Build an array type.
///
/// \param T The type of each element in the array.
///
/// \param ASM C99 array size modifier (e.g., '*', 'static').
///
/// \param ArraySize Expression describing the size of the array.
///
/// \param Loc The location of the entity whose type involves this
/// array type or, if there is no such entity, the location of the
/// type that will have array type.
///
/// \param Entity The name of the entity that involves the array
/// type, if known.
///
/// \returns A suitable array type, if there are no errors. Otherwise,
/// returns a NULL type.
|
function
|
private void retrieveHourlyGages(final Calendar startTime,
final Calendar endTime, final Map<Calendar, GageData> gageDataMap)
throws HPEFieldGenConfigurationException {
logger.info("Retrieving gage data spanning time period: {} to {} ...",
startTime.getTime().toString(), endTime.getTime().toString());
List<Hourlypc> hourlyPCList = HourlyPrecipLoaderUtil
.loadPCForTimePeriod(startTime, endTime, null, ts, true);
List<Hourlypp> hourlyPPList = HourlyPrecipLoaderUtil
.loadPPForTimePeriod(startTime, endTime, null, ts, true);
if (hourlyPCList.isEmpty() && hourlyPPList.isEmpty()) {
/*
* No precipitation data.
*/
if (numHoursRun > 1) {
logger.info(
"There is no precipitation data between: {} and {}.",
startTime.getTime().toString(),
endTime.getTime().toString());
}
}
/*
* "pointer" to an index in the hourlyPCList list.
*/
final int[] pHourlyPCIdx = new int[] { 0 };
/*
* "pointer" to an index in the hourlyPPList list.
*/
final int[] pHourlyPPIdx = new int[] { 0 };
final int[] pcRecordsCount = new int[] { hourlyPCList.size() };
final int[] ppRecordsCount = new int[] { hourlyPPList.size() };
/*
* hard-coded to 0.0 on line 178 of hpe_fieldgen/TEXT/read_gage_precip.c
*/
final float minPercent = 0.0f;
/*
* hard-coded to 1 (true) on line 188 of
* hpe_fieldgen/TEXT/read_gage_precip.c
*/
final boolean advance = true;
/*
* hard-coded to 1 on line 189 of hpe_fieldgen/TEXT/read_gage_precip.c
*/
final int duration = 1;
final Calendar totalEndTime = TimeUtil.newCalendar(startTime);
for (int i = 0; i < numHoursRun; i++) {
/* Compute the ending data retrieval time. */
totalEndTime.add(Calendar.HOUR_OF_DAY, 1);
while ((pHourlyPPIdx[0] < hourlyPPList.size())
|| (pHourlyPCIdx[0] < hourlyPCList.size())) {
final PrecipTotal precipTotal = PrecipUtil.getInstance()
.getTotalHourlyPrecip(hourlyPCList, pHourlyPCIdx,
hourlyPPList, pHourlyPPIdx,
totalEndTime.getTime(), duration, minPercent,
CommonHydroConstants.PRECIP_TS_RANK
| CommonHydroConstants.PRECIP_PP,
advance, pcRecordsCount, ppRecordsCount);
if (precipTotal.value >= 0.0
&& precipTotal.value != CommonHydroConstants.MISSING_PRECIP) {
if (CommonHydroConstants.PC.equals(precipTotal.getPe())) {
if (!rangeCheck(precipTotal.value, precipTotal.lid,
totalEndTime)) {
continue;
}
}
/*
* Retrieve the stations latitude and longitude and convert
* to obtain the station's HRAP coordinates.
*/
GageLocation gageLocation = gageLocationsGeoData
.getGageLocationMap().get(precipTotal.lid);
if (gageLocation == null) {
logger.warn(
"Could not successfully retrieve lat/lon for lid: {}.",
precipTotal.lid);
} else {
/*
* Convert the retrieved lat/lon to HRAP coordinates.
*/
final Coordinate latLonCoord = gageLocation
.getLatLonCoord();
final Coordinate hrapCoord = HPEFieldgenUtils
.convertLatLonToScaledHrap(latLonCoord.x,
latLonCoord.y, hrapGridFactor);
/*
* Truncate to find integer HRAP coordinates which gage
* is in. Translate the origin to the lowerleft corner
* of the MPE estimation domain, that is, convert from
* the global to the local HRAP grid.
*/
int irow = (int) hrapCoord.x;
int icol = (int) hrapCoord.y;
irow -= geoGridData.x;
icol -= geoGridData.y;
precipTotal.setValue((float) (precipTotal.getValue()
* TOTAL_PRECIP_VALUE_MULTIPLIER));
PrecipDataRecord precipP3 = new PrecipDataRecord();
precipP3.setValue(precipTotal.getValue());
precipP3.setX(icol);
precipP3.setY(irow);
precipP3.setId(precipTotal.getLid());
precipP3.setTs(precipTotal.getTs());
precipP3.setLatitude(latLonCoord.x);
precipP3.setLongitude(latLonCoord.y);
gageDataMap.get(totalEndTime).addGagePrecipP3(precipP3);
if ((irow >= 0) && (irow < geoGridData.height)
&& ((icol >= 0)
&& (icol < geoGridData.getWidth()))) {
/*
* TODO: consider (based on data usage) adding some
* type of 'within hrap' flag to the Precip Data
* Record object and just set it when within the
* hrap instead of duplicating the information in
* another data structure that is tracked
* separately.
*/
PrecipDataRecord precip = new PrecipDataRecord();
precip.setValue(precipTotal.getValue());
precip.setX(icol);
precip.setY(irow);
precip.setId(precipTotal.getLid());
precip.setTs(precipTotal.getTs());
precip.setLatitude(latLonCoord.x);
precip.setLongitude(latLonCoord.y);
gageDataMap.get(totalEndTime).addGagePrecip(precip);
logger.info(
"Loaded gage precip for: lid = {}, x = {}, y = {}, value(mm) = {} ({})",
gageLocation.getLid(), irow, icol,
precipTotal.getValue(),
totalEndTime.getTime().toString());
}
}
}
}
}
}
|
java
| 20
| 0.459388
| 108
| 52.266667
| 135
|
/**
* Retrieves the hourly PC and PP data for the specified start date/time to
* the specified end date/time. All data that is retrieved is placed in the
* specified gage data {@link Map} structure. Based on:
* hpe_fieldgen/TEXT/read_gage_precip.c.
*
* @param startTime
* the specified start date/time
* @param endTime
* the specified end date/time
* @param gageDataMap
* the specified gage data {@link Map} structure
* @throws HPEFieldGenConfigurationException
*/
|
function
|
def build(c, path="../..", name="testapp"):
    if os.name not in ["nt", "posix"]:
        print("Sorry, this only supports POSIX (e.g. Linux, OSX) and Windows.")
sys.exit()
path=os.path.normpath(path)
print("Building : -n {} -p {} ".format(name, path))
if os.path.exists(os.path.join(path, name)):
print("sorry, path {} exists".format(os.path.abspath(os.path.join(path, name))))
r=input(" .. type y or yes, to go ahead deleting the existing: {} ? : ".format(os.path.join(path,name)))
if r in ["y", "yes"]:
import shutil
r=shutil.rmtree(os.path.join(path,name))
print(40*"-")
print(" ..... deleted dir tree: {}".format(os.path.join(path, name)))
print(40*"-")
build_all(c,name, path)
else:
print(40*"-")
print(" ok, exiting...")
print(40*"-")
sys.exit()
else:
build_all(c,name, path)
|
python
| 15
| 0.512873
| 112
| 41.26087
| 23
|
Create a testapp from the current git version
generate_app -n <name> -p <path>
create the according venv
cd (abspath)
virtualenv venv
activate the pip environment
win: venv\Scripts\activate
other: source venv/bin/activate
install the requirements
pip install -r requirements.txt
run the tests
cd tests
python runtests
run the server
python server.py
=> You can find the testresults here:
localhost:8080/testresults
|
function
|
public static void checkChildElementNames(Element element, List<String> allowedChildNodeNames) {
    for (int i = 0; i < element.getChildCount(); i++) {
        Node childNode = element.getChild(i);
        if (childNode instanceof Text) {
            // non-whitespace text is only allowed if "#text" is in the allowed list
            if (childNode.getValue().trim().length() > 0
                && !allowedChildNodeNames.contains(TEXT)) {
                throw new RuntimeException("Unexpected text in element: "+element.getLocalName());
            }
        } else if (childNode instanceof Element) {
            // use the child element's own name, not an attribute of the parent
            String elementName = ((Element) childNode).getLocalName();
            if (!allowedChildNodeNames.contains(elementName)) {
                throw new RuntimeException("Unknown element: "+elementName+" in element: "+element.getLocalName());
            }
        }
        // other node types (comments, processing instructions) are always allowed
    }
}
|
java
| 17
| 0.683465
| 104
| 38.75
| 16
|
/** checks that childNode names are in allowed list.
*
* @param element
* @param allowedChildNodeNames
* can include element names, #text, - other node types are always allowed
*
* @throws RuntimeException if unknown childElement
*/
|
function
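The Element/Text/Node API above matches the XOM library (nu.xom); under that assumption, a short usage sketch, where TEXT is taken to be the "#text" marker mentioned in the Javadoc:

```java
import java.util.Arrays;
import nu.xom.Element;

public class ChildNameCheckDemo {
    public static void main(String[] args) {
        Element div = new Element("div");
        div.appendChild(new Element("span")); // allowed element child
        div.appendChild("some text");         // allowed because "#text" is listed
        // Throws RuntimeException if any child element is not <span>;
        // a static import of checkChildElementNames from its utility class is assumed.
        checkChildElementNames(div, Arrays.asList("span", "#text"));
    }
}
```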
|
public class ChromeKeyboardVisibilityDelegate extends SingleWindowKeyboardVisibilityDelegate {
/**
* Creates a new visibility delegate.
* @param activity A {@link WeakReference} to a {@link ChromeActivity}.
*/
public ChromeKeyboardVisibilityDelegate(WeakReference<Activity> activity) {
super(activity);
assert activity.get() instanceof ChromeActivity;
}
@Override
public @Nullable ChromeActivity getActivity() {
return (ChromeActivity) super.getActivity();
}
/**
* Hide only Android's soft keyboard. Keeps eventual keyboard replacements and extensions
* untouched. Usually, you will want to call {@link #hideKeyboard(View)}.
* @param view A focused {@link View}.
* @return True if the keyboard was visible before this call.
*/
public boolean hideSoftKeyboardOnly(View view) {
return hideAndroidSoftKeyboard(view);
}
/**
* Returns whether Android soft keyboard is showing and ignores all extensions/replacements.
* Usually, you will want to call {@link #isKeyboardShowing(Context, View)}.
* @param context A {@link Context} instance.
* @param view A {@link View}.
* @return Returns true if Android's soft keyboard is visible. Ignores extensions/replacements.
*/
public boolean isSoftKeyboardShowing(Context context, View view) {
return isAndroidSoftKeyboardShowing(context, view);
}
@Override
public boolean hideKeyboard(View view) {
ChromeActivity activity = getActivity();
boolean wasManualFillingViewShowing = false;
if (activity != null) {
wasManualFillingViewShowing =
activity.getManualFillingComponent().isFillingViewShown(view);
activity.getManualFillingComponent().hide();
}
return super.hideKeyboard(view) || wasManualFillingViewShowing;
}
@Override
public boolean isKeyboardShowing(Context context, View view) {
ChromeActivity activity = getActivity();
return super.isKeyboardShowing(context, view)
|| (activity != null
&& activity.getManualFillingComponent().isFillingViewShown(view));
}
}
|
java
| 12
| 0.675336
| 99
| 38.839286
| 56
|
/**
* A {@link SingleWindowKeyboardVisibilityDelegate} that considers UI elements of a
* {@link ChromeActivity} which amend or replace the keyboard.
*/
|
class
|
@Value.Immutable
@BuckStyleImmutable
abstract class AbstractRetryPolicy {
@Value.Default
public int getMaxRetries() {
return 2;
}
@Value.Default
public Backoff.Strategy getBackoffStrategy() {
return Backoff.constant(0);
}
@Value.Default
public Runnable getBeforeRetry() {
return () -> {};
}
@Value.Default
public boolean getRestartAllStreamingCalls() {
return false;
}
@Value.Default
public ScheduledExecutorService getExecutor() {
return Executors.newSingleThreadScheduledExecutor(
new ThreadFactoryBuilder().setNameFormat("retryer-%s").setDaemon(true).build());
}
}
|
java
| 13
| 0.712698
| 88
| 20.033333
| 30
|
/**
* Immutable policy describing how requests should be retried in terms of delay between retries, the
* executor to use, and maximum number of retries.
*/
|
class
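With the Immutables processor, the abstract class above typically yields a generated RetryPolicy with a builder. A hypothetical sketch; the generated class name and setter names follow the usual @Value.Immutable/@BuckStyleImmutable conventions and are assumptions, not shown in this row:

```java
// Hypothetical: Immutables generates RetryPolicy from AbstractRetryPolicy.
RetryPolicy policy = RetryPolicy.builder()
        .setMaxRetries(3)                                  // default is 2
        .setBackoffStrategy(Backoff.constant(100))         // constant backoff
        .setBeforeRetry(() -> System.out.println("retry")) // hook before each retry
        .build();
```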
|
def update_project_settings(session, repo, settings, user, from_api=False):
user_obj = get_user(session, user)
update = []
new_settings = repo.settings
for key in new_settings:
if key in settings:
if key == "Minimum_score_to_merge_pull-request":
try:
settings[key] = int(settings[key]) if settings[key] else -1
except (ValueError, TypeError):
raise pagure.exceptions.PagureException(
"Please enter a numeric value for the 'minimum "
"score to merge pull request' field."
)
elif key == "Web-hooks":
settings[key] = settings[key] or None
else:
settings[key] = settings[key] in ["y", True]
if new_settings[key] != settings[key]:
update.append(key)
new_settings[key] = settings[key]
else:
if from_api:
val = new_settings[key]
else:
val = False
if key == "Web-hooks":
val = None
if new_settings[key] != val:
update.append(key)
new_settings[key] = val
if not update:
return "No settings to change"
else:
repo.settings = new_settings
repo.date_modified = datetime.datetime.utcnow()
session.add(repo)
session.flush()
pagure.lib.notify.log(
repo,
topic="project.edit",
msg=dict(
project=repo.to_json(public=True),
fields=sorted(update),
agent=user_obj.username,
),
)
if "pull_request_access_only" in update:
update_read_only_mode(session, repo, read_only=True)
session.add(repo)
session.flush()
pagure.lib.git.generate_gitolite_acls(project=repo)
return "Edited successfully settings of repo: %s" % repo.fullname
|
python
| 17
| 0.507084
| 79
| 37.641509
| 53
|
Update the settings of a project.
If from_api is true, all values that are not specified will be changed
back to their default value.
Otherwise, if from_api is False, all non-specified values are assumed
to be set to ``False`` or ``None``.
|
function
|
func LoggingMiddleware(h Handler) Handler {
return HandlerFunc(func(ctx context.Context, w ResponseWriter, r *Request) {
lw := &logResponseWriter{rw: w}
h.ServeGemini(ctx, lw, r)
host := r.ServerName()
log.Printf("gemini: %s %q %d %d", host, r.URL, lw.Status, lw.Wrote)
})
}
|
go
| 16
| 0.683099
| 77
| 34.625
| 8
|
// LoggingMiddleware returns a handler that wraps h and logs Gemini requests
// and their responses to the log package's standard logger.
// Requests are logged with the format "gemini: {host} {URL} {status code} {bytes written}".
|
function
|
func (tr *TypeRegistry) Update(u Type) (Type, error) {
name := u.Key()
if t, found := tr.Types[name]; !found {
return t, errors.New("Type " + name + " not defined but attempted update.")
}
tr.Types[name] = u
return u, nil
}
|
go
| 12
| 0.626087
| 77
| 27.875
| 8
|
// Update modifies a type by looking it up by name in the type registry and then
// replacing it.
// It is an error if the type does not exist.
|
function
|
public static void expediteScheduledPolling(String containerId)
{
PollingTimeSettings pollingTimeSettings = POLLING_TIME_CONFIGURATIONS.get(containerId);
if (pollingTimeSettings == null)
return;
synchronized (pollingTimeSettings) {
pollingTimeSettings.setNextPollingTime(new Date());
}
}
|
java
| 10
| 0.792079
| 89
| 32.777778
| 9
|
/*
* This method is invoked to schedule an immediate polling once the client knows that there are some updates in the container that are not
* reflected in the cache.
*/
|
function
|
class MensaQ: # Game class
"""
Defines the :class:`MensaQ` class, which provides base config for Mensa question game.
:param `number` creates first argument.
:param `ans` set to 0 as placeholder to increment guesses.
:param `mn` argument placeholder for minimum number.
:param `mx` argument placeholder for maximum number.
"""
def __init__(self, number, mn, mx):
self.number = number
self.ans = 0
self.min = mn
self.max = mx
def get_guess(self):
"""
        Initializes the question for user input.
"""
guess = input(f'If you count from {get_min(self)} - {get_max(self)} how many {get_var(self)}s will you pass?: ')
print('')
if self.valid_number(guess):
return int(guess)
else:
print("Please enter only positive numbers (e.g. 1..2..3..) unless this question is too advanced.")
return self.get_guess()
def valid_number(self, str_number):
"""
Checks for a valid number input from user.
:param `str_number`: number object.
"""
try:
number = int(str_number)
except:
self.ans += 1
return False
return number >= 0
def play(self):
"""
Main game function with funny taunts and revealing solution output.
"""
while True:
self.ans += 1
guess = self.get_guess()
if guess < get_count():
yell()
continue
if guess > get_count():
taunt1()
continue
if self.ans < 3:
print(f"You solved it! Based on {self.ans} attempt(s) your IQ is around:")
high_iq()
break
            if self.ans >= 3 and self.ans < 6:
                print(f"You solved it! Based on {self.ans} attempt(s) your IQ is around:")
                med_iq()
                break
            if self.ans >= 6:
                print(f"You solved it! Based on {self.ans} attempt(s) your IQ is around:")
low_iq()
break
else:
break
print(f"These are the {get_count()} instantces of the number {get_var(self)} take a look =>")
# loop to print out numbers containing the variable set by get_var
for i in range(get_min(self), get_max(self)):
if str(get_var('')) in str(i):
print(f'{i}', end = " ")
print()
print()
|
python
| 15
| 0.460666
| 120
| 30.090909
| 88
|
Defines the :class:`MensaQ` class, which provides base config for Mensa question game.
:param `number` creates first argument.
:param `ans` set to 0 as placeholder to increment guesses.
:param `mn` argument placeholder for minimum number.
:param `mx` argument placeholder for maximum number.
|
class
|
public class Solution {
public List<String> generateParenthesis(int n) {
List<String> result = new ArrayList<String>();
if (n < 1) {
return result;
}
List<StringBuilder> temp = new ArrayList<StringBuilder>();
temp.add(new StringBuilder());
int curr = 1;
while (curr <= n * 2) {
int N = temp.size();
for (int j = 0; j < N; j++) {
int sum = 0, open = 0;
StringBuilder sb = temp.get(j);
for (int i = 0; i < sb.length(); i++) {
if (sb.charAt(i) == '(') {
sum++;
open++;
} else {
sum--;
}
}
if (sum > 0 && open < n) {
StringBuilder sb1 = new StringBuilder(sb);
sb1.append(')');
temp.add(sb1);
sb.append('(');
} else if (sum > 0 && open >= n) {
sb.append(')');
} else if (sum == 0) {
sb.append('(');
}
}
for (StringBuilder sb: temp) {
System.out.format("'%s' ", sb.toString());
}
System.out.println();
curr++;
}
for (StringBuilder sb : temp) {
result.add(sb.toString());
}
return result;
}
}
|
java
| 16
| 0.357576
| 66
| 32.022222
| 45
|
/**
* Given n pairs of parentheses, write a function to generate all combinations of well-formed parentheses.
*
* For example, given n = 3, a solution set is:
*
* "((()))", "(()())", "(())()", "()(())", "()()()"
*/
|
class
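A quick driver for the solution above; for n = 3 it should print the five well-formed combinations listed in the docstring (plus the intermediate debug output the method emits):

```java
public class GenerateParenthesisDemo {
    public static void main(String[] args) {
        // Expected results: ((())), (()()), (())(), ()(()), ()()()
        for (String s : new Solution().generateParenthesis(3)) {
            System.out.println(s);
        }
    }
}
```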
|
def updatemutual(rpc: ElementsRPCCaller, plan: str, data: str, tx_data: str,
output: str, min_output: int, force: bool, network: str
) -> None:
select_chain_params(network)
debtor_data = read_aux_data(data)
with click.open_file(tx_data) as f:
mutual_tx_data = json.loads(f.read())
mutual_tx = CElementsMutableTransaction.deserialize(
x(mutual_tx_data["hex"]))
(
repayment_plan,
contract_tx_list,
vstage_list,
creditor_control_asset,
debtor_control_asset,
bitcoin_asset,
) = compute_info(rpc, debtor_data, read_plandata(plan), min_output,
Path(plan).parent)
    if len(contract_tx_list) != len(vstage_list):
raise click.ClickException(
"Contract seems to be finished, cannot update mutual-spend "
"transaction")
vstage = vstage_list[-1]
blind_info = BlindingInfo(
[
vstage.blinding_data.contract_input_descriptor,
BlindingInputDescriptor(
asset=creditor_control_asset,
amount=1,
blinding_factor=Uint256(),
asset_blinding_factor=Uint256(),
),
],
[CPubKey(x(data)) for data in mutual_tx_data["pubkeys"]]
)
last_debtor_utxo = find_asset_utxo_by_min_amount(
rpc, debtor_control_asset, 0
)
if last_debtor_utxo is None:
raise click.ClickException("The debtor control asset is not found")
debtor_control_tx = CElementsTransaction.deserialize(
x(rpc.getrawtransaction(last_debtor_utxo["txid"]))
)
debtor_control_txout_index = find_explicit_asset_txout_index(
debtor_control_tx, debtor_control_asset
)
mutual_tx.vin.append(
CElementsMutableTxIn(
CElementsOutPoint(
hash=debtor_control_tx.GetTxid(),
n=debtor_control_txout_index,
)
)
)
mutual_tx.wit.vtxinwit.append(CElementsMutableTxInWitness())
blind_info.descriptors.append(
BlindingInputDescriptor(
asset=debtor_control_asset,
amount=1,
blinding_factor=Uint256(),
asset_blinding_factor=Uint256(),
)
)
try:
principal_txout = find_explicit_asset_txout_index(
mutual_tx, repayment_plan.principal.asset
)
except DataLookupError:
need_amount = 0
else:
need_amount = mutual_tx.vout[principal_txout].nValue.to_amount()
if need_amount > 0:
debt_utxo = find_asset_utxo_by_amount(
rpc, vstage.plan.principal.asset, need_amount
)
if debt_utxo is None:
if not force:
answer = click.confirm(
f"You don't have the utxo with {need_amount} sat "
f"to pay the debt. Do you want it will be created?"
)
if not answer:
click.echo(
f"You must create an utxo with {need_amount} "
f"manually to add the debt to the transaction"
)
return
debt_utxo_info = make_utxo(
rpc, need_amount, vstage.plan.principal.asset
)
else:
debt_utxo_info = parse_utxo_dict(debt_utxo)
mutual_tx.vin.append(CElementsMutableTxIn(
debt_utxo_info.outpoint))
mutual_tx.wit.vtxinwit.append(CElementsMutableTxInWitness())
blind_info.descriptors.append(
debt_utxo_info.blinding_input_descriptor)
debtor_collateral_return_address = CCoinConfidentialAddress(
rpc.getnewaddress())
mutual_tx.vout.append(
CElementsTxOut(
nValue=CConfidentialValue(repayment_plan.collateral.amount),
nAsset=CConfidentialAsset(repayment_plan.collateral.asset),
scriptPubKey=debtor_collateral_return_address.to_scriptPubKey(),
).to_mutable()
)
mutual_tx.wit.vtxoutwit.append(CElementsMutableTxOutWitness())
blind_info.pubkeys.append(
debtor_collateral_return_address.blinding_pubkey
)
fee_amount = calculate_fee(rpc, COMMON_TX_APPROX_SIZE)
fee_utxo_info = get_fee_utxo(
rpc, fee_amount + MIN_GUARANTEED_CHANGE, bitcoin_asset
)
fee_change_address = CCoinAddress(rpc.getnewaddress())
fee_utxo_amount = fee_utxo_info.blinding_input_descriptor.amount
change_amount = fee_utxo_amount - fee_amount
assert change_amount > 0
mutual_tx.vin.append(CElementsMutableTxIn(fee_utxo_info.outpoint))
mutual_tx.wit.vtxinwit.append(CElementsMutableTxInWitness())
blind_info.descriptors.append(fee_utxo_info.blinding_input_descriptor)
mutual_tx.vout.append(
CElementsMutableTxOut(
nValue=CConfidentialValue(change_amount),
nAsset=CConfidentialAsset(bitcoin_asset),
scriptPubKey=fee_change_address.to_scriptPubKey(),
)
)
mutual_tx.vout.append(
CElementsMutableTxOut(
nValue=CConfidentialValue(fee_amount),
nAsset=CConfidentialAsset(bitcoin_asset),
)
)
mutual_tx.wit.vtxoutwit.append(CElementsMutableTxOutWitness())
mutual_tx.wit.vtxoutwit.append(CElementsMutableTxOutWitness())
blind_info.pubkeys.append(CPubKey())
blind_info.pubkeys.append(CPubKey())
last_contract_tx = contract_tx_list[-1]
assert sum(out.nValue.to_amount() for out in mutual_tx.vout) == sum(
idesc.amount for idesc in blind_info.descriptors
)
blind_tx_and_validate(mutual_tx, blind_info.descriptors,
blind_info.pubkeys)
checked_outs_data = b"".join(txout.serialize() for txout
in mutual_tx.vout[:2])
other_outs_data = b"".join(txout.serialize() for txout
in mutual_tx.vout[2:])
full_outs_data = b"".join(txout.serialize() for txout in mutual_tx.vout)
assert len(full_outs_data) <= MAX_SCRIPT_ELEMENT_SIZE, len(full_outs_data)
offset = vstage.script_data.checked_outs_hashes.index(
hashlib.sha256(checked_outs_data).digest()
)
assert offset % 32 == 0
sign_for_covenant(
mutual_tx,
0,
[offset // 32, 0],
checked_outs_data,
other_outs_data,
last_contract_tx.vout[CONTRACT_COLLATERAL_OUT_INDEX].nValue,
vstage.script_data.script,
)
mutual_tx = sign_tx_with_wallet(rpc, mutual_tx)
with click.open_file(output, mode="x") as f:
f.write(b2x(mutual_tx.to_immutable().serialize()))
print(f"the transaction data was saved to {output}")
|
python
| 17
| 0.609964
| 78
| 39.02994
| 167
|
Update mutual-spend transaction with debtor's signature
and required witness for the collateral input
|
function
|
public static string WrapText(SpriteFont font, string text, float maxWidth)
{
List<string> wrappedLines = new List<string>();
StringBuilder builder = new StringBuilder();
string[] delimiters = new string[2] { Environment.NewLine, "\n" };
string[] lines = text.Split(delimiters, StringSplitOptions.None);
foreach (string line in lines)
{
string wrappedLine = WrapLine(font, line, maxWidth);
wrappedLines.Add(wrappedLine);
}
int lineCount = wrappedLines.Count;
for (int lineIndex = 0; lineIndex < lineCount; lineIndex++)
{
string wrappedLine = wrappedLines[lineIndex];
if (lineIndex < lineCount - 1)
{
builder.AppendLine(wrappedLine);
}
else
{
builder.Append(wrappedLine);
}
}
return builder.ToString();
}
|
c#
| 13
| 0.506208
| 78
| 39.307692
| 26
|
/// <summary>
/// Wrap a text string to fit within the given width.
/// </summary>
///
/// <param name="font">
/// The font intended for drawing the text.
/// </param>
/// <param name="text">
/// The text to wrap.
/// </param>
/// <param name="maxWidth">
/// The maximum line width for the text.
/// </param>
///
/// <returns>
/// A wrapped piece of text.
/// </returns>
|
function
|
public IHoconElement resolveSubstitutionOnCurrentPath(HoconSubstitution sub)
throws HoconParseException {
var targetPath = sub.path;
int ptr = 0;
while (targetPath != null && ptr < currentPath.size() - 1) {
if (!targetPath.name.equals(currentPath.get(ptr)))
break;
targetPath = targetPath.next;
ptr++;
}
if (targetPath == null)
throw new HoconParseException("Invalid self referential", ptr);
var latestCommonAncestor = pathStack.get(ptr);
if (sub.isDetermined)
return latestCommonAncestor.getPath(targetPath).clone();
else
try {
return latestCommonAncestor.getPath(targetPath).clone();
} catch (Exception e) {
return new HoconSubstitution.NullSubstitution();
}
}
|
java
| 12
| 0.665375
| 76
| 34.227273
| 22
|
/**
* Resolves a substitution when parsing.
*
* @param sub the substitution to resolve
* @return the resolved target element
* @throws HoconParseException thrown when the substitution is invalid
*/
|
function
|
func (conn *DB) JoinsUserAndDefaultItemStrings(user *User) *DB {
return conn.
Joins(
`LEFT JOIN items_strings default_strings
ON default_strings.item_id = items.id AND default_strings.language_tag = items.default_language_tag`).
Joins(`LEFT JOIN items_strings user_strings
ON user_strings.item_id=items.id AND user_strings.language_tag = ?`, user.DefaultLanguage)
}
|
go
| 9
| 0.7289
| 111
| 48
| 8
|
// JoinsUserAndDefaultItemStrings joins items_strings with the given view twice
// (as default_strings for item's default language and as user_strings for the user's default language)
|
function
|
@GET
@Path("/{id}/showQueryWizardResults")
@Produces({"application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "text/html"})
@Interceptors({ResponseInterceptor.class, RequiredInterceptor.class})
@Timed(name = "dw.query.showQueryWizardResults", absolute = true)
public QueryWizardResultResponse showQueryWizardResults(@Required("id") @PathParam("id") String id) {
QueryWizardResultResponse theResponse = new QueryWizardResultResponse(jqueryUri, dataTablesUri);
theResponse.setQueryId(id);
BaseQueryResponse theNextResults;
try {
theNextResults = queryExecutor.next(id);
} catch (Exception e) {
theNextResults = null;
}
theResponse.setResponse(theNextResults);
return theResponse;
}
|
java
| 9
| 0.681818
| 129
| 48.235294
| 17
|
/**
 * Gets the next page of results from the query object. If the object is no longer alive, meaning that the current session has expired, then this will fail. The
* response object type is dynamic, see the listQueryLogic operation to determine what the response type object will be.
*
* @param id
* - (@Required)
* @see datawave.webservice.query.runner.BasicQueryBean#showQueryWizardResults(String) for the @Required definition
*
* @return datawave.webservice.result.BaseQueryResponse
* @RequestHeader X-ProxiedEntitiesChain use when proxying request for user, by specifying a chain of DNs of the identities to proxy
* @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
* @RequestHeader query-session-id session id value used for load balancing purposes. query-session-id can be placed in the request in a Cookie header or as
* a query parameter
* @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
* @ResponseHeader X-query-page-number page number returned by this call
* @ResponseHeader X-query-last-page if true then there are no more pages for this query, caller should call close()
* @ResponseHeader X-Partial-Results true if the page contains less than the requested number of results
*
* @HTTP 200 success
* @HTTP 204 success and no results
* @HTTP 404 if id not found
* @HTTP 412 if the query is no longer alive, client should call #reset(String) and try again
* @HTTP 500 internal server error
*/
|
function
|
public class FraudSubmit : Sale
{
internal string Operation => "fraud:submit";
public new string CardNum { get; set; }
public string GatewayRefNum { get; set; }
public StatusType GatewayResult { get; set; }
public string GatewayCVV { get; set; }
public AvsResponseType GatewayAVS { get; set; }
public string GatewayError { get; set; }
public OrderType OrderType { get; set; }
}
|
c#
| 6
| 0.621381
| 55
| 39.909091
| 11
|
/// <summary>
/// The Submit command is used in conjunction with a valid FraudWatch account to submit ecommerce transactions for a fraud verification check.
/// </summary>
|
class
|
public abstract class FileAwareWarningsGuard extends WarningsGuard {
private final AbstractCompiler compiler;
protected FileAwareWarningsGuard(AbstractCompiler compiler) {
this.compiler = compiler;
}
@Nullable
protected final Node getScriptNodeForError(JSError error) {
// If error.node is connected to AST, this will be much faster than compiler.getScriptNode
for (Node n = error.getNode(); n != null; n = n.getParent()) {
if (n.isScript()) {
return n;
}
}
if (error.getSourceName() == null) {
return null;
}
Node scriptNode = compiler.getScriptNode(error.getSourceName());
if (scriptNode != null) {
// TODO(b/73088845): This should always be a SCRIPT node
if (!scriptNode.isScript()) {
return null;
}
checkState(scriptNode.isScript(), scriptNode);
return scriptNode;
}
return null;
}
}
|
java
| 12
| 0.657837
| 94
| 28.258065
| 31
|
/**
* An abstract WarningsGuard that provides an additional getScriptNodeForError() method
* for accessing the containing SCRIPT node of the AST in a robust way.
*/
|
class
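A sketch of how a concrete guard could use getScriptNodeForError(), assuming the standard Closure Compiler WarningsGuard contract where level(JSError) returns null to defer to other guards; the subclass and the ".gen.js" convention are hypothetical:

```java
// Hypothetical guard: silence warnings whose containing script is a generated file.
public final class GeneratedFileWarningsGuard extends FileAwareWarningsGuard {
    GeneratedFileWarningsGuard(AbstractCompiler compiler) {
        super(compiler);
    }

    @Override
    public CheckLevel level(JSError error) {
        Node script = getScriptNodeForError(error);
        if (script != null && script.getSourceFileName().endsWith(".gen.js")) {
            return CheckLevel.OFF; // suppress warnings in generated sources
        }
        return null; // no opinion; defer to other guards
    }
}
```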
|
@PostMapping("/complaints")
@PreAuthorize("hasAnyRole('USER', 'TA', 'INSTRUCTOR', 'ADMIN')")
public ResponseEntity<Complaint> createComplaint(@RequestBody Complaint complaint, Principal principal) throws URISyntaxException {
log.debug("REST request to save Complaint: {}", complaint);
if (complaint.getId() != null) {
throw new BadRequestAlertException("A new complaint cannot already have an id", ENTITY_NAME, "idexists");
}
if (complaint.getResult() == null || complaint.getResult().getId() == null) {
throw new BadRequestAlertException("A complaint can be only associated to a result", ENTITY_NAME, "noresultid");
}
if (complaintRepository.findByResult_Id(complaint.getResult().getId()).isPresent()) {
throw new BadRequestAlertException("A complaint for this result already exists", ENTITY_NAME, "complaintexists");
}
Result originalResult = resultRepository.findById(complaint.getResult().getId())
.orElseThrow(() -> new BadRequestAlertException("The result you are referring to does not exist", ENTITY_NAME, "resultnotfound"));
User originalSubmissor = originalResult.getParticipation().getStudent();
Long courseId = originalResult.getParticipation().getExercise().getCourse().getId();
long numberOfUnacceptedComplaints = complaintRepository.countUnacceptedComplaintsByStudentIdAndCourseId(originalSubmissor.getId(), courseId);
if (numberOfUnacceptedComplaints >= MAX_COMPLAINT_NUMBER_PER_STUDENT) {
throw new BadRequestAlertException("You cannot have more than " + MAX_COMPLAINT_NUMBER_PER_STUDENT + " open or rejected complaints at the same time.", ENTITY_NAME,
"toomanycomplaints");
}
if (originalResult.getCompletionDate().isBefore(ZonedDateTime.now().minusWeeks(1))) {
throw new BadRequestAlertException("You cannot submit a complaint for a result that is older than one week.", ENTITY_NAME, "resultolderthanaweek");
}
if (!originalSubmissor.getLogin().equals(principal.getName())) {
throw new BadRequestAlertException("You can create a complaint only for a result you submitted", ENTITY_NAME, "differentuser");
}
originalResult.setHasComplaint(true);
complaint.setSubmittedTime(ZonedDateTime.now());
complaint.setStudent(originalSubmissor);
complaint.setResult(originalResult);
try {
complaint.setResultBeforeComplaint(resultService.getOriginalResultAsString(originalResult));
}
catch (JsonProcessingException exception) {
throw new InternalServerErrorException("Failed to store original result");
}
resultRepository.save(originalResult);
Complaint savedComplaint = complaintRepository.save(complaint);
return ResponseEntity.created(new URI("/api/complaints/" + savedComplaint.getId()))
.headers(HeaderUtil.createEntityCreationAlert(ENTITY_NAME, savedComplaint.getId().toString())).body(savedComplaint);
}
|
java
| 13
| 0.700968
| 175
| 71.116279
| 43
|
/**
* POST /complaint: create a new complaint
*
* @param complaint the complaint to create
 * @return the ResponseEntity with status 201 (Created) and with body the new complaint
* @throws URISyntaxException if the Location URI syntax is incorrect
*/
|
function
|
class VMeshData {
constructor(type = 1, surface = 4, format = 0) {
this.type = type;
this.surface = surface;
this.format = format;
this.scale = 1;
this.groups = [];
this.indices = undefined;
this.vertices = undefined;
}
/**
* Set FVF bitmask to object properties
* @param {Number} mask
*/
set format(mask) {
this.hasPosition = (mask & FVF_POSITION) > 0;
this.hasNormal = (mask & FVF_NORMAL) > 0;
this.hasDiffuse = (mask & FVF_DIFFUSE) > 0;
this.hasSpecular = (mask & FVF_SPECULAR) > 0;
this.mapCount = (mask & FVF_MAP_MASK) >> 8;
this.vertexLength = 0;
if (this.hasPosition) this.vertexLength += FVF_POSITION_SIZE;
if (this.hasNormal) this.vertexLength += FVF_NORMAL_SIZE;
if (this.hasDiffuse) this.vertexLength += FVF_DIFFUSE_SIZE;
if (this.hasSpecular) this.vertexLength += FVF_SPECULAR_SIZE;
this.vertexLength += this.mapCount * FVF_MAP_SIZE;
this.positionOffset = this.hasPosition ? 0 : undefined;
this.normalOffset = this.hasNormal ? (this.hasPosition ? FVF_POSITION_SIZE : 0) : undefined;
this.diffuseOffset = this.hasDiffuse ? (this.hasPosition ? FVF_POSITION_SIZE : 0) + (this.hasNormal ? FVF_NORMAL_SIZE : 0) : undefined;
this.specularOffset = this.hasSpecular ? (this.hasPosition ? FVF_POSITION_SIZE : 0) + (this.hasNormal ? FVF_NORMAL_SIZE : 0) + (this.hasDiffuse ? FVF_DIFFUSE_SIZE : 0) : undefined;
this.mapOffset = this.mapCount ? (this.hasPosition ? FVF_POSITION_SIZE : 0) + (this.hasNormal ? FVF_NORMAL_SIZE : 0) + (this.hasDiffuse ? FVF_DIFFUSE_SIZE : 0) + (this.hasSpecular ? FVF_SPECULAR_SIZE : 0) : undefined;
}
/**
* Get FVF bitmask from object properties
* @return {Number}
*/
get format() {
let mask = 0;
if (this.hasPosition) mask |= FVF_POSITION;
if (this.hasNormal) mask |= FVF_NORMAL;
if (this.hasDiffuse) mask |= FVF_DIFFUSE;
if (this.hasSpecular) mask |= FVF_SPECULAR;
mask |= (this.mapCount << 8);
return mask;
}
/**
* Get vertex count in buffer
* @return {Number}
*/
get vertexCount() {
return this.vertices.byteLength / this.vertexLength;
}
/**
* @param {Number} index
* @return {Number}
*/
getMapOffset(index = 0) {
return index < this.mapCount ? this.mapOffset + index * FVF_MAP_SIZE : null;
}
*getGroupsByReference(reference) {
for (let i = 0; i < reference.groupCount; i++) yield this.groups[reference.groupStart + i];
}
/**
* Load data
* @param {ArrayBufferWalker} data
* @return {Boolean}
*/
load(data) {
if (! (data instanceof ArrayBufferWalker)) throw new TypeError("Invalid data object");
let [type, surface] = data.readInt32(2);
if (type != 0x1) throw new TypeError("Unrecognized mesh type");
if (surface != 0x4) throw new TypeError("Unrecognized surface type");
let [groupCount, indexCount, format, vertexCount] = data.readInt16(4);
this.format = format; // D3D FVF mask flags
this.groups = [];
// Read mesh groups
for (let g = 0; g < groupCount; g++) (this.groups[g] = new VMeshGroup()).load(data);
// Read indices
this.indices = data.readInt16(indexCount);
// Read vertices
this.vertices = data.readInt8(vertexCount * this.vertexLength);
if (vertexCount != this.vertexCount) throw new RangeError("Mesh has invalid number of vertices");
return true;
}
/**
* Returns array of position vectors for wireframe buffer data
* @return {Float32Array}
*/
getPositions() {
if (! this.hasPosition) return false;
const vertexCount = this.vertexCount,
vertexLength = this.vertexLength,
vertexView = new DataView(this.vertices.buffer, this.vertices.byteOffset, this.vertices.byteLength),
positions = new Float32Array(vertexCount * 3);
for (let v = 0, p = 0, e = 0; v < vertexCount; v++) {
e = v * vertexLength;
positions[p++] = vertexView.getFloat32(e, true);
positions[p++] = vertexView.getFloat32(e + 4, true);
positions[p++] = vertexView.getFloat32(e + 8, true);
}
return positions;
}
}
|
javascript
| 14
| 0.662312
| 227
| 30.346457
| 127
|
/**
* Mesh data contains index and vertex buffers
* Format determines vertex buffer attributes (D3D Flexible Vertex Format)
* Portions are pulled via VMeshRef
*
* @property {Number} type Mesh format?
* @property {Number} surface Hitbox format?
* @property {Number} format Vertex data format (FVF)
* @property {Number} scale Default scale to render
* @property {Array} groups Material groups
* @property {Uint16Array} indices Triangle indices
* @property {Uint8Array} vertices Vertex data buffer
* @property {Boolean} hasPosition
* @property {Boolean} hasNormal
* @property {Boolean} hasDiffuse
* @property {Boolean} hasSpecular
* @property {Number} mapCount
* @property {Number} vertexLength
* @property {Number} vertexCount
* @property {Number} positionOffset
* @property {Number} normalOffset
* @property {Number} diffuseOffset
* @property {Number} specularOffset
* @property {Number} mapOffset
*/
|
class
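A minimal Python sketch of the layout logic above, assuming illustrative flag values and attribute byte sizes (they are not the module's actual constants): a vertex stride and the attribute offsets fall out of an FVF-style bitmask exactly as in the offset chain above.
# Illustrative FVF layout computation; all constants are assumptions.
FVF_POSITION, FVF_NORMAL, FVF_DIFFUSE, FVF_SPECULAR = 0x1, 0x2, 0x4, 0x8
SIZES = {"position": 12, "normal": 12, "diffuse": 4, "specular": 4, "map": 8}

def fvf_layout(mask):
    offsets, cursor = {}, 0
    for name, flag in (("position", FVF_POSITION), ("normal", FVF_NORMAL),
                       ("diffuse", FVF_DIFFUSE), ("specular", FVF_SPECULAR)):
        if mask & flag:
            offsets[name] = cursor
            cursor += SIZES[name]
    map_count = (mask & 0xF00) >> 8  # assumed map-count field in bits 8-11
    if map_count:
        offsets["map0"] = cursor
        cursor += map_count * SIZES["map"]
    return cursor, offsets  # (vertex stride in bytes, attribute offsets)

print(fvf_layout(FVF_POSITION | FVF_NORMAL | (1 << 8)))  # (32, {'position': 0, 'normal': 12, 'map0': 24})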
|
function FileWatcher(metadataCache) {
this.queue_ = new AsyncUtil.Queue();
this.metadataCache_ = metadataCache;
this.watchedDirectoryEntry_ = null;
this.onDirectoryChangedBound_ = this.onDirectoryChanged_.bind(this);
chrome.fileManagerPrivate.onDirectoryChanged.addListener(
this.onDirectoryChangedBound_);
this.filesystemMetadataObserverId_ = null;
this.thumbnailMetadataObserverId_ = null;
this.externalMetadataObserverId_ = null;
}
|
javascript
| 8
| 0.787281
| 70
| 40.545455
| 11
|
/**
* Watches for changes in the tracked directory, including local metadata
* changes.
*
* @param {MetadataCache} metadataCache Instance of MetadataCache.
* @extends {cr.EventTarget}
* @constructor
*/
|
function
|
class FlightDynamicsExperiment:
"""The symbench athens client's experiment class.
    Experiment with SWRI's flight dynamics software based on a fixed-bemp design.
    This class abstracts away the full stack of design constructs,
    exposing the domain scientist (i.e. you) to the things you care
    about, i.e. the design variables. Many instances of this class will be
    available, and these are what people are expected to work with.
    This class also assumes that the flight dynamics software from SWRI is installed
    and available in your PATH.
    .. warning::
        Experimental API, subject to changes
    Parameters
    ----------
    design: symbench_athens_client.models.design.SeedDesign
        The design instance to run this experiment on
    testbenches: str, pathlib.Path or list/set/tuple thereof
        The location of the testbench data for estimating mass properties of a design
    propellers_data: str, pathlib.Path
        The location of the propellers data
    valid_parameters: iterable of str
        The design parameter names this experiment is allowed to set
    valid_requirements: iterable of str
        The requirement names this experiment accepts
    fdm_path: str, pathlib.Path
        The location of the fdm executable; if None, it is assumed to be in PATH
    estimator: function, optional, default=None
        The estimator function from the uav_analysis library to use. If None,
        quad_copter_fixed_bemp2 is used.
Attributes
----------
session_id: str
ISO Formatted time stamp
results_dir: str, pathlib.Path
The results directory
Notes
-----
Every run gets a guid (returned in the output dictionary). The results for each
run (the flight dynamics input and output files) are saved in results/artifacts.
The results/output.csv file is what you should look for if you ever want to revisit
the metrics.
"""
def __init__(
self,
design,
testbenches,
propellers_data,
valid_parameters,
valid_requirements,
fdm_path=None,
estimator=None,
):
self.testbenches, self.propellers_data = self._validate_files(
testbenches, propellers_data
) # ToDo: More robust Validation here
self.design = design
self.valid_parameters = valid_parameters
self.valid_requirements = valid_requirements
self.logger = get_logger(self.__class__.__name__)
self.session_id = f"e-{datetime.now().isoformat()}".replace(":", "-")
self.executor = FDMExecutor(fdm_path=fdm_path)
self.results_dir = Path(
f"results/{self.design.__class__.__name__}/{self.session_id}"
).resolve()
self.formulae = estimate_mass_formulae(
frozenset(self.testbenches),
estimator=estimator or quad_copter_fixed_bemp2,
)
self.start_new_session()
def _create_results_dir(self):
if not self.results_dir.exists():
os.makedirs(self.results_dir, exist_ok=True)
(self.results_dir / ".generated").touch()
self._add_component_and_connection_map()
artifacts_dir = self.results_dir / "artifacts"
if not artifacts_dir.exists():
os.makedirs(artifacts_dir)
def _add_component_and_connection_map(self):
extract_from_zip(
self.testbenches[0],
self.results_dir,
{
"componentMap.json",
"connectionMap.json",
},
)
self._customize_components()
def _customize_components(self, out_dir=None):
with (self.results_dir / "componentMap.json").open("r") as components_file:
components = json.load(components_file)
design_components = self.design.components(by_alias=True)
for component in components:
if component["FROM_COMP"] in design_components:
component["LIB_COMPONENT"] = design_components[
component["FROM_COMP"]
]
if out_dir is None:
out_dir = self.results_dir
with (out_dir / "componentMap.json").open("w") as components_file:
json.dump(components, components_file)
self.design.swap_list = {}
def start(self):
self._create_results_dir()
def run_for(
self,
parameters=None,
requirements=None,
change_dir=False,
write_to_output_csv=False,
):
"""Run the flight dynamics for the given parameters and requirements"""
parameters = self._validate_dict(parameters, "parameters")
requirements = self._validate_dict(requirements, "requirements")
for key, value in parameters.items():
if key in self.valid_parameters:
setattr(self.design, key, value)
self.logger.info(
f"About to execute FDM on {self.design.__class__.__name__}, "
f"parameters: {self.design.parameters()}, "
f"requirements: {requirements}"
)
run_guid = str(uuid4())
fd_files_base_path = self.results_dir / "artifacts" / run_guid
os.makedirs(fd_files_base_path, exist_ok=True)
if self.design.swap_list != dict():
self._customize_components(fd_files_base_path)
metrics = {"GUID": run_guid, "AnalysisError": None}
try:
current_dir = os.getcwd()
if change_dir:
os.chdir(fd_files_base_path)
for i in [1, 3, 4, 5]:
fd_input_path = f"FlightDyn_Path{i}.inp"
fd_output_path = f"FlightDynReport_Path{i}.out"
self.design.to_fd_input(
testbench_path_or_formulae=self.formulae,
requested_vertical_speed=0
if i != 4
else requirements.get("requested_vertical_speed", -2),
requested_lateral_speed=0
if i == 4
else int(requirements.get("requested_lateral_speed", 10)),
flight_path=i,
propellers_data_path=relative_path(
os.getcwd(), self.propellers_data
)
+ os.sep,
filename=fd_input_path,
)
input_metrics, flight_metrics, path_metrics = self.executor.execute(
str(fd_input_path), str(fd_output_path)
)
# Input Metrics
metrics.update(input_metrics.to_csv_dict())
other_metrics = self.design.parameters()
for key in other_metrics:
if key.startswith("Length"):
metrics[key] = other_metrics[key]
# Get the FlightPath metrics
metrics.update(flight_metrics.to_csv_dict())
metrics.update(path_metrics.to_csv_dict())
# Move input and output files to necessary locations
if not change_dir:
move(fd_input_path, fd_files_base_path)
move(fd_output_path, fd_files_base_path)
move("./metrics.out", fd_files_base_path / f"metrics_Path{i}.out")
            # Remove metrics.out, score.out, namemap.out
cleanup_score_files()
# Update the total score
update_total_score(metrics)
metrics["AnalysisError"] = False
if change_dir:
os.chdir(current_dir)
except Exception as e:
metrics["AnalysisError"] = True
            raise
if write_to_output_csv:
write_output_csv(output_dir=self.results_dir, metrics=metrics)
return metrics
def start_new_session(self):
self.session_id = f"e-{datetime.now().isoformat()}".replace(":", "-")
self.results_dir = Path(
f"results/{self.design.__class__.__name__}/{self.session_id}"
).resolve()
self.start()
@staticmethod
def _validate_dict(var, name):
if var and not isinstance(var, dict):
raise TypeError(
f"Expecting {name} to be a dictionary, got {type(var)} instead"
)
return var or {}
@staticmethod
def _validate_files(testbenches, propellers_data):
if isinstance(testbenches, (list, set, tuple)):
assert all(
Path(testbench).resolve().exists() for testbench in testbenches
), "Testbench data paths are not valid"
else:
testbenches = Path(testbenches).resolve()
assert testbenches.exists(), "The testbench data path doesn't exist"
testbenches = [testbenches]
propellers_data = Path(propellers_data).resolve()
assert (
propellers_data.resolve().exists()
), "The propellers data path doesn't exist"
tb = TestbenchData()
try:
for d in testbenches:
tb.load(str(d))
        except Exception as exc:
            raise TypeError("The testbench data provided is not valid") from exc
return testbenches, propellers_data
|
python
| 19
| 0.574437
| 106
| 34.660079
| 253
|
The symbench athens client's experiment class.
Experiment with SWRI's flight dynamics software based on a fixed-bemp design.
This class abstracts away the full stack of design constructs,
exposing the domain scientist (i.e. you) to the things you care
about, i.e. the design variables. Many instances of this class will be
available, and these are what people are expected to work with.
This class also assumes that the flight dynamics software from SWRI is installed
and available in your PATH.
.. warning::
    Experimental API, subject to changes
Parameters
----------
design: symbench_athens_client.models.design.SeedDesign
    The design instance to run this experiment on
testbenches: str, pathlib.Path or list/set/tuple thereof
    The location of the testbench data for estimating mass properties of a design
propellers_data: str, pathlib.Path
    The location of the propellers data
valid_parameters: iterable of str
    The design parameter names this experiment is allowed to set
valid_requirements: iterable of str
    The requirement names this experiment accepts
fdm_path: str, pathlib.Path
    The location of the fdm executable; if None, it is assumed to be in PATH
estimator: function, optional, default=None
    The estimator function from the uav_analysis library to use. If None,
    quad_copter_fixed_bemp2 is used.
Attributes
----------
session_id: str
ISO Formatted time stamp
results_dir: str, pathlib.Path
The results directory
Notes
-----
Every run gets a guid (returned in the output dictionary). The results for each
run (the flight dynamics input and output files) are saved in results/artifacts.
The results/output.csv file is what you should look for if you ever want to revisit
the metrics.
|
class
|
func (p *Prowlarr) GetBackupFilesContext(ctx context.Context) ([]*starr.BackupFile, error) {
var output []*starr.BackupFile
if err := p.GetInto(ctx, "v1/system/backup", nil, &output); err != nil {
return nil, fmt.Errorf("api.Get(system/backup): %w", err)
}
return output, nil
}
|
go
| 10
| 0.689046
| 92
| 39.571429
| 7
|
// GetBackupFilesContext returns all available Prowlarr backup files.
// Use GetBody to download a file using BackupFile.Path.
|
function
|
class TestTemplate : public testing::Test {
protected:
void SetUp()
{
ble = &BLE::Instance();
}
void TearDown()
{
ble::delete_mocks();
}
BLE* ble;
events::EventQueue queue;
};
|
c++
| 10
| 0.536036
| 43
| 16.153846
| 13
|
/* This test does not test anything; you may use it as a template for your unit tests.
 * It shows all the elements you need in order to mock all the BLE APIs. */
|
class
|
static errno_t ipv4_infilter(void *cookie, mbuf_t *m, int offset, u_int8_t protocol)
{
switch (protocol) {
case IPPROTO_GRE:
{
#ifdef DEBUG
printf("%s: got packet\n", __FUNCTION__);
#endif
mbuf_t m0 = in_gre_input(*m, offset);
if (m0 == NULL)
return EJUSTRETURN;
else
*m = m0;
break;
}
case IPPROTO_MOBILE:
{
#ifdef DEBUG
printf("%s: got packet\n", __FUNCTION__);
#endif
mbuf_t m0 = in_mobile_input(*m, offset);
if (m0 == NULL)
return EJUSTRETURN;
else
*m = m0;
break;
}
default:
break;
}
return 0;
}
|
c
| 12
| 0.43342
| 84
| 22.96875
| 32
|
/* The caller of this function (ipv4_infilter) will free the mbuf when
 * it returns any error except EJUSTRETURN.
 * So remember to check whether any function called from here frees
 * the mbuf chain on error. That is, always return EJUSTRETURN
 * if you free the mbuf, or if a function called from here frees it.
 * Otherwise you get a DOUBLE FREE, causing a kernel panic...
 *
 * Return ZERO if this filter is not interested in the packet;
 * otherwise, it means this filter dealt with the packet, and other filters
 * will not see it.
 *
 * @param cookie
 * @param m
 * @param offset ip header offset
 * @param protocol proto, IPPROTO_GRE/IPPROTO_MOBILE
 */
|
function
|
def _get_inputs(cls, inputs, input_index):
input_split = len(inputs) // 2
start_idx = input_split * input_index
retval = inputs[start_idx: start_idx + input_split]
logger.debug("model inputs: %s, input_split: %s, start_idx: %s, inference_inputs: %s",
inputs, input_split, start_idx, retval)
return retval
|
python
| 8
| 0.59726
| 94
| 51.285714
| 7
|
Obtain the inputs for the requested swap direction.
Parameters
----------
inputs: list
The full list of input tensors to the saved faceswap training model
input_index: int
The input index for the requested swap direction
Returns
-------
list
List of input tensors to feed the model for the requested swap direction
|
function
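A stand-in trace of the indexing above, with hypothetical tensor names: a model serving two swap directions holds 2*N inputs, and the index selects which contiguous half to feed.
inputs = ["face_a", "mask_a", "face_b", "mask_b"]  # hypothetical tensors
input_split = len(inputs) // 2                     # 2
for input_index in (0, 1):
    start_idx = input_split * input_index
    print(input_index, inputs[start_idx: start_idx + input_split])
# 0 ['face_a', 'mask_a']
# 1 ['face_b', 'mask_b']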
|
public static void main(String[] args) throws Exception{
int port;
if( args.length == 0 ){
port = 8085;
}else if( args.length == 1 ){
port = Integer.parseInt(args[0]);
}else{
System.err.println("Too many command line arguments!");
System.err.println("Usage: "+TestServer.class.getCanonicalName()+" [port]");
System.exit(-1);
return;
}
TestServer server = new TestServer(true);
server.register(Webclient.class);
server.register(Webadmin.class);
try{
server.start(new InetSocketAddress(port));
if( checkWebclient() ){
System.err.println("Webclient at: "+server.jetty.getURI().resolve("/webclient/default.htm"));
System.err.println("Webadmin at: "+server.jetty.getURI().resolve("/admin/default.htm"));
}
System.err.println("PM service at: "+server.getPMServiceURI());
server.join();
}finally{
server.destroy();
}
}
|
java
| 14
| 0.670843
| 97
| 31.555556
| 27
|
/**
 * Run the test server with the official i2b2
 * webclient.
* @param args command line arguments: port can be specified optionally
* @throws Exception any error
*/
|
function
|
def add_color(self, name, model, description):
if self.color is False:
self.packages.append(Package("color"))
self.color = True
self.preamble.append(Command("definecolor", arguments=[name,
model,
description]))
|
python
| 11
| 0.428191
| 77
| 52.857143
| 7
|
r"""Add a color that can be used throughout the document.
Args
----
name: str
Name to set for the color
model: str
The color model to use when defining the color
description: str
The values to use to define the color
|
function
|
func BrokerFactory(cnf *settings.MachineryConfig) (brokeriface.Broker, error) {
if strings.HasPrefix(cnf.Broker, "eager") {
return eagerbroker.New(), nil
}
if strings.HasPrefix(cnf.Broker, "nats:") {
return powerhouse.New(cnf), nil
}
return nil, errors.Errorf("Factory failed with broker URL: %v", cnf.Broker)
}
|
go
| 9
| 0.728125
| 79
| 34.666667
| 9
|
// BrokerFactory creates a new object of iface.Broker.
// Currently the eager and NATS brokers are supported.
|
function
|
def _VerticalAlignFunctionArguments(self, output_filename):
with open(output_filename, 'r', encoding='utf8') as file_object:
lines = file_object.readlines()
alignment_number_of_spaces = 0
alignment_number_of_tabs = 0
in_function_call = False
with open(output_filename, 'w', encoding='utf8') as file_object:
for line in lines:
if not line.startswith('\t'):
file_object.write(line)
continue
stripped_line = line.rstrip()
if in_function_call:
if stripped_line.endswith(')') or stripped_line.endswith(');'):
in_function_call = False
stripped_line = line.lstrip()
line = '{0:s}{1:s}{2:s}'.format(
'\t' * alignment_number_of_tabs,
' ' * alignment_number_of_spaces,
stripped_line)
elif stripped_line.endswith('('):
in_function_call = True
stripped_line = line.lstrip()
alignment_number_of_spaces = stripped_line.rfind(' ')
if alignment_number_of_spaces == -1:
alignment_number_of_spaces = 1
else:
alignment_number_of_spaces += 2
alignment_number_of_tabs = len(line) - len(stripped_line)
file_object.write(line)
|
python
| 16
| 0.581948
| 73
| 41.133333
| 30
|
Vertically aligns function arguments.
Note this is a very basic approach that should suffice for the yaltools and
pyyal Python module source files.
Args:
output_filename (str): path of the output file.
|
function
|
Stmt WrapLayoutTransformationAttrs(const Stage& stage, Stmt body) {
if (stage->layout_transforms.size()) {
for (int i = 0; i < stage->op->num_outputs(); i++) {
body = AttrStmt(Array<ObjectRef>{stage->op.output(i), stage->layout_transforms},
tir::attr::layout_transforms, 1, body);
}
}
if (stage->axis_separators.size()) {
for (int i = 0; i < stage->op->num_outputs(); i++) {
body = AttrStmt(Array<ObjectRef>{stage->op.output(i), stage->axis_separators},
tir::attr::axis_separators, 1, body);
}
}
return body;
}
|
c++
| 16
| 0.584459
| 86
| 38.533333
| 15
|
// Annotate the statement with the layout transforms and axis
// separators of the stage. These annotations are removed during
// SchedulePostProcToPrimFunc. Afterwards, layout transforms are
// specified in the PrimFunc attrs, and the axis_separators are
// specified in the BufferNode.
|
function
|
def join_event(self, user, event_id):
with pymssql.connect(server = DB_SERVER, user = DB_USER, password = DB_PASSWORD, database = DB_NAME) as db_conn:
with db_conn.cursor(as_dict = True) as cursor:
try:
cursor.callproc('AddUserToEvent', (user, event_id))
db_conn.commit()
cursor.execute('select EventDescription, EventDate, EventTime '
'from [Event] join SlackUserToEvent on [Event].EventID = SlackUserToEvent.EventID '
'where SlackUserID = %s and [Event].EventID = %s', (user, event_id))
event = cursor.fetchall()
if event:
timestamp = int(time.mktime(time.strptime('%s %s' % (event[0]['EventDate'],
event[0]['EventTime']),
'%Y-%m-%d %H:%M:%S.0000000')))
response = self.client.api_call('reminders.add',
token = self.client.token,
text = event[0]['EventDescription'],
time = timestamp,
user = user)
if response['ok']:
return 'Reminder successfully created'
else:
return 'Failed to create reminder'
else:
raise pymssql.DatabaseError('Failed to join event')
except pymssql.DatabaseError as e:
                print(e.message)
return jsonify({
'response_type': 'ephemeral',
'text': 'You could not be added to the event',
'content-type': 'application/json',
'replace-original': True
})
|
python
| 23
| 0.522573
| 114
| 48.030303
| 33
|
Adds a user to an event and creates a Slack reminder.
:param user: str
The ID of the Slack user to add to the event.
:param event_id: int
The ID of the event to add the Slack user to.
    :return: A status message for the reminder, or a JSON error response if the user could not be added.
|
function
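The reminder-timestamp step above, sketched in isolation (the date and time strings below are illustrative stand-ins for the stored SQL Server values):
import time

event_date, event_time = "2024-05-01", "18:30:00.0000000"
timestamp = int(time.mktime(time.strptime(
    "%s %s" % (event_date, event_time), "%Y-%m-%d %H:%M:%S.0000000")))
print(timestamp)  # local-time epoch seconds for the event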
|
private void openDesktop(File file, ResourceBundle resources) {
try {
if (Desktop.isDesktopSupported()) {
Desktop.getDesktop().open(file);
return;
} else {
throw new DesktopNotSupportedException(resources.getString("DesktopException"));
}
} catch (DesktopNotSupportedException dnse) {
showError(resources.getString("DesktopIncompatible"), dnse.getMessage());
} catch (Exception e) {
showError(resources.getString("FileError"), resources.getString("FileException"));
}
}
|
java
| 13
| 0.726733
| 85
| 35.142857
| 14
|
/**
* Called to open the user's image within their native OS when the click on the Download object, and
* displays an error message if not possible.
*
* @param file The file describing the downloaded image.
* @param resources A properties file containing key-value language mappings.
*/
|
function
|
def network_orbital_correction(ifgs, degree, offset, params, m_ifgs=None,
preread_ifgs=None):
src_ifgs = ifgs if m_ifgs is None else m_ifgs
src_ifgs = mst.mst_from_ifgs(src_ifgs)[3]
vphase = vstack([i.phase_data.reshape((i.num_cells, 1)) for i in src_ifgs])
vphase = squeeze(vphase)
B = get_network_design_matrix(src_ifgs, degree, offset)
B = B[~isnan(vphase)]
orbparams = dot(pinv(B, 1e-6), vphase[~isnan(vphase)])
ncoef = _get_num_params(degree)
if preread_ifgs:
temp_ifgs = OrderedDict(sorted(preread_ifgs.items())).values()
ids = master_slave_ids(get_all_epochs(temp_ifgs))
else:
ids = master_slave_ids(get_all_epochs(ifgs))
coefs = [orbparams[i:i+ncoef] for i in
range(0, len(set(ids)) * ncoef, ncoef)]
if preread_ifgs:
temp_ifg = Ifg(ifgs[0])
temp_ifg.open()
dm = get_design_matrix(temp_ifg, degree, offset=False)
temp_ifg.close()
else:
dm = get_design_matrix(ifgs[0], degree, offset=False)
for i in ifgs:
if isinstance(i, str):
i = Ifg(i)
i.open(readonly=False)
shared.nan_and_mm_convert(i, params)
_remove_network_orb_error(coefs, dm, i, ids, offset)
|
python
| 15
| 0.591406
| 79
| 41.7
| 30
|
This algorithm implements a network inversion to determine orbital
corrections for a set of interferograms forming a connected network.
Warning: This will write orbital error corrected phase_data to the ifgs.
:param list ifgs: List of Ifg class objects reduced to a minimum spanning
tree network
:param str degree: model to fit (PLANAR / QUADRATIC / PART_CUBIC)
:param bool offset: True to calculate the model using offsets
:param dict params: dictionary of configuration parameters
:param list m_ifgs: list of multilooked Ifg class objects
(sequence must be multilooked versions of 'ifgs' arg)
:param dict preread_ifgs: Dictionary containing information specifically
for MPI jobs (optional)
:return: None - interferogram phase data is updated and saved to disk
|
function
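A toy version of the pseudo-inverse fit above, reduced to a single planar ramp a*x + b*y + c on synthetic, NaN-masked data (numpy stands in for the module's design-matrix helpers):
import numpy as np

x, y = np.meshgrid(np.arange(10.0), np.arange(10.0))
phase = 0.5 * x - 0.2 * y + 3.0
phase.flat[::7] = np.nan                       # simulate masked pixels
B = np.column_stack([x.ravel(), y.ravel(), np.ones(x.size)])
v = phase.ravel()
ok = ~np.isnan(v)                              # drop NaN observations, as above
coef = np.linalg.pinv(B[ok], rcond=1e-6) @ v[ok]
print(coef)                                    # ~ [0.5, -0.2, 3.0]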
|
public void broadcast(int messageType, int reciever, ArrayList<Node> Nodes){
if(this != Nodes.get(reciever) && !fakeDestination[messageType]){
/*
random object will be used to calculate whether or not a
gossip node will broadcast.
*/
Random selector = new Random();
switch (messageType){
case 0:
if(iseligible(Nodes.get(reciever))){
infect(messageType, Nodes.get(reciever));
if(useFakeDestination)
fakeDestination[messageType] = true;
}
else{
for(int i = 0; i < eligibleNeighbors.get(messageType).size(); ++i){
int node = eligibleNeighbors.get(messageType).get(i);
if(uninfectedNeighbors.get(messageType).contains(node)){
infect(messageType, Nodes.get(node));
}
}
}
break;
case 1:
if(transmitionData[1][1] == 1){
if(iseligible(Nodes.get(reciever))){
infect(messageType, Nodes.get(reciever));
if(useFakeDestination)
fakeDestination[messageType] = true;
}
else{
for(int i = 0; i < eligibleNeighbors.get(messageType).size(); ++i){
int node = eligibleNeighbors.get(messageType).get(i);
infect(messageType, Nodes.get(node));
}
}
}
else if(selector.nextInt(100)>=20){
if(iseligible(Nodes.get(reciever))){
infect(messageType, Nodes.get(reciever));
if(useFakeDestination)
fakeDestination[messageType] = true;
}
else{
for(int i = 0; i < eligibleNeighbors.get(messageType).size(); ++i){
int node = eligibleNeighbors.get(messageType).get(i);
if(uninfectedNeighbors.get(messageType).contains(node)){
infect(messageType, Nodes.get(node));
}
}
}
}
break;
case 2:
if(transmitionData[1][2] == 1){
if(iseligible(Nodes.get(reciever))){
infect(messageType, Nodes.get(reciever));
if(useFakeDestination)
fakeDestination[messageType] = true;
}
else{
for(int i = 0; i < eligibleNeighbors.get(messageType).size(); ++i){
int node = eligibleNeighbors.get(messageType).get(i);
infect(messageType, Nodes.get(node));
}
}
}
else if(selector.nextInt(100)>=40){
if(iseligible(Nodes.get(reciever))){
infect(messageType, Nodes.get(reciever));
if(useFakeDestination)
fakeDestination[messageType] = true;
}
else{
for(int i = 0; i < eligibleNeighbors.get(messageType).size(); ++i){
int node = eligibleNeighbors.get(messageType).get(i);
if(uninfectedNeighbors.get(messageType).contains(node)){
infect(messageType, Nodes.get(node));
}
}
}
}
break;
case 3:
if(iseligible(Nodes.get(reciever))){
infect(Nodes.get(reciever));
if(useFakeDestination)
fakeDestination[messageType] = true;
}
else{
d = uninfectedNeighbors.get(messageType).size();
if(transmitionData[1][3] == 1){
k = uninfectedNeighbors.get(messageType).size();
k2 = Math.pow((uninfectedNeighbors.get(messageType).size()), 2);
for(int i = 0; i < eligibleNeighbors.get(messageType).size(); ++i){
int node = eligibleNeighbors.get(messageType).get(i);
infect(Nodes.get(node));
}
}
else{
if(uninfectedNeighbors.get(messageType).size() > 0){
calculateAlpha();
if(alpha == 1){
for(int i = 0; i < eligibleNeighbors.get(messageType).size(); ++i){
int node = eligibleNeighbors.get(messageType).get(i);
if(uninfectedNeighbors.get(messageType).contains(node)){
infect(Nodes.get(node));
}
}
}
else{
rangeSort(Nodes, uninfectedNeighbors.get(messageType));
double field = uninfectedNeighbors.get(messageType).size();
for(int i = 0; (1.0 - alpha) <= (field/(double)uninfectedNeighbors.get(messageType).size()); i++){
int node = uninfectedNeighbors.get(messageType).get(i);
if(eligibleNeighbors.get(messageType).contains(node)){
infect(Nodes.get(node));
}
field--;
}
}
}
}
}
break;
}
}
}
|
java
| 25
| 0.346111
| 134
| 55.257813
| 128
|
/**
 * @description Depending on the message type, this method executes the
 * corresponding message transfer protocol.
* @param messageType 0-3 indicating flood, gossip80, gossip60, or infect
* @param reciever id of the receiving node
* @param Nodes all nodes populating the environment.
*
*/
|
function
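The gossip thresholds above translate to forwarding probabilities: selector.nextInt(100) >= 20 fires roughly 80% of the time, and >= 40 roughly 60%. A quick Python check of that reading:
import random

trials = 100_000
for threshold, label in ((20, "gossip80"), (40, "gossip60")):
    hits = sum(random.randrange(100) >= threshold for _ in range(trials))
    print(label, round(hits / trials, 2))  # ~0.8 and ~0.6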
|
private boolean isOrsonPDFAvailable() {
        Class<?> pdfDocumentClass = null;
        try {
            pdfDocumentClass = Class.forName("com.orsonpdf.PDFDocument");
        } catch (ClassNotFoundException e) {
            // OrsonPDF is not on the classpath; fall through and return false
        }
return (pdfDocumentClass != null);
}
|
java
| 10
| 0.611111
| 73
| 32.875
| 8
|
/**
* Returns <code>true</code> if OrsonPDF is on the classpath, and
* <code>false</code> otherwise. The OrsonPDF library can be found at
* http://www.object-refinery.com/pdf/
*
* @return A boolean.
*/
|
function
|
func (module *WrapperModule) ClassifyFlow(flow *types.Flow) (result types.ClassificationResult) {
for _, wrapper := range module.activeWrappers {
if proto, err := wrapper.ClassifyFlow(flow); proto != types.Unknown && err == nil {
result.Protocol = proto
result.Source = wrapper.GetWrapperName()
flow.SetClassificationResult(result.Protocol, result.Source)
return
}
}
return
}
|
go
| 11
| 0.728426
| 97
| 34.909091
| 11
|
// ClassifyFlow applies all the wrappers to a flow and returns the protocol
// that is detected by a wrapper if there is one. Otherwise, it returns the
// Unknown protocol.
|
function
|
public class SSLIncomingStreamReader extends IncomingStreamReader
{
private final DataInputStream input;
public SSLIncomingStreamReader(StreamHeader header, Socket socket, DataInputStream input) throws IOException
{
super(header, socket);
this.input = input;
}
@Override
protected long readnwrite(long length, long bytesRead, long offset, FileChannel fc) throws IOException
{
int toRead = (int)Math.min(FileStreamTask.CHUNK_SIZE, length - bytesRead);
ByteBuffer buf = ByteBuffer.allocate(toRead);
input.readFully(buf.array());
fc.write(buf);
bytesRead += buf.limit();
remoteFile.progress += buf.limit();
return bytesRead;
}
}
|
java
| 11
| 0.681259
| 112
| 32.272727
| 22
|
/**
* This class uses a DataInputStream to read data as opposed to a FileChannel.transferFrom
* used by IncomingStreamReader because the underlying SSLServerSocket doesn't support
* encrypting over NIO SocketChannel.
*/
|
class
|
public String encode(String payloadString)
{
if (payloadString == null || payloadString.isEmpty())
throw new IllegalArgumentException("payload string cannot be empty");
logger.debug("encoding: {}", payloadString);
List<String> payloadstringList = Arrays.asList(payloadString.split(" "));
logger.debug("splited string are: {}", payloadstringList.toString());
byte[] payload = new byte[payloadstringList.size()];
payloadstringList.forEach(e -> Arrays.fill(payload, Byte.parseByte(e,16)));
logger.debug("encoded bytes are: {}", Arrays.toString(payload));
String base64 = Base64.getEncoder().encodeToString(payload);
logger.debug("encoded base64 is: {}", base64);
return base64;
}
|
java
| 11
| 0.659794
| 83
| 54.5
| 14
|
/**
 * Encodes a space-separated hex string payload into bytes, returned Base64-encoded.
 * @param payloadString payload in String format.
 * @return Base64-encoded string of the payload bytes
* @throws IllegalArgumentException when payload string is null or empty.
*/
|
function
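The same hex-token-to-Base64 pipeline as the method above, sketched in Python with a made-up payload:
import base64

payload_string = "1a 2b 3c"                # hypothetical payload
payload = bytes(int(tok, 16) for tok in payload_string.split())
print(base64.b64encode(payload).decode())  # Gis8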
|
public static class AggClusterPerformanceCriteria implements PerformanceCriteria {
private final DynamicBooleanProperty isCritical;
private final DynamicIntProperty queueSize;
private final DynamicIntProperty numThreads;
public AggClusterPerformanceCriteria(String clusterName) {
isCritical = DynamicPropertyFactory.getInstance().getBooleanProperty("turbine.aggCluster.performance.isCritical." + clusterName, true);
queueSize = DynamicPropertyFactory.getInstance().getIntProperty("turbine.aggCluster.performance.queueSize." + clusterName, 10000);
numThreads = DynamicPropertyFactory.getInstance().getIntProperty("turbine.aggCluster.performance.numThreads." + clusterName, 1);
}
@Override
public boolean isCritical() {
return isCritical.get();
}
@Override
public int getMaxQueueSize() {
return queueSize.get();
}
@Override
public int numThreads() {
return numThreads.get();
}
}
|
java
| 11
| 0.677871
| 147
| 40.230769
| 26
|
/**
* Sample criteria that is used by the {@link AggregateClusterMonitor#getEventHandler()} to handle data from multiple {@link InstanceMonitor} connections.
* <p>Note that one can configure the perf size for each cluster that is being run by Turbine using the properties
* <b>turbine.aggCluster.performance.queueSize.[clusterName]</b> and <b>turbine.aggCluster.performance.numThreads.[clusterName]</b>
*
*/
|
class
|
public AffectedComponentSet toExistingSet() {
final ControllerServiceProvider serviceProvider = flowController.getControllerServiceProvider();
final AffectedComponentSet existing = new AffectedComponentSet(flowController);
inputPorts.stream().filter(port -> port.getProcessGroup().getInputPort(port.getIdentifier()) != null).forEach(existing::addInputPort);
outputPorts.stream().filter(port -> port.getProcessGroup().getOutputPort(port.getIdentifier()) != null).forEach(existing::addOutputPort);
remoteInputPorts.stream().filter(port -> port.getProcessGroup().findRemoteGroupPort(port.getIdentifier()) != null).forEach(existing::addRemoteInputPort);
remoteOutputPorts.stream().filter(port -> port.getProcessGroup().findRemoteGroupPort(port.getIdentifier()) != null).forEach(existing::addRemoteOutputPort);
processors.stream().filter(processor -> processor.getProcessGroup().getProcessor(processor.getIdentifier()) != null).forEach(existing::addProcessor);
reportingTasks.stream().filter(task -> flowController.getReportingTaskNode(task.getIdentifier()) != null).forEach(existing::addReportingTask);
controllerServices.stream().filter(service -> serviceProvider.getControllerServiceNode(service.getIdentifier()) != null).forEach(existing::addControllerServiceWithoutReferences);
return existing;
}
|
java
| 13
| 0.768841
| 186
| 114.083333
| 12
|
/**
 * Returns a new AffectedComponentSet that represents only those components that currently exist within the NiFi instance. When a set of dataflow updates has occurred, it is very possible
* that one or more components referred to by the AffectedComponentSet no longer exist (for example, there was a dataflow update that removed a Processor, so that Processor no longer exists).
*
* @return an AffectedComponentSet that represents all components within this AffectedComponentSet that currently exist within the NiFi instance. The components contained by the returned
 * AffectedComponentSet will always be a subset of or equal to the set of components contained by this.
*/
|
function
|
func HTTPServer(port string, ready chan bool) {
local := fmt.Sprintf("localhost:%s", port)
logger.WithFields(log.Fields{
"local": local,
}).Info("Starting server")
ln, err := net.Listen("tcp", local)
if err != nil {
logger.WithFields(log.Fields{
"local": local,
"error": err,
}).Fatal("Unable to start server")
}
ready <- true
for {
conn, err := ln.Accept()
if err != nil {
logger.WithFields(log.Fields{
"port": port,
"error": err,
}).Warn("Failed to accept connection")
} else {
logger.WithFields(log.Fields{
"port": port,
"remote": conn.RemoteAddr().String(),
}).Debug("Accepted connection")
go handleConnection(conn)
}
}
}
|
go
| 20
| 0.622093
| 47
| 22.758621
| 29
|
// HTTPServer starts an HTTP server listening on the specified port. HTTP is "supported" only in
// that the server will read up to each newline until a blank line is read, at which point it will
// attempt to read the body in segments of 20 bytes. Neither chunked encoding, Content-Length, nor
// any other method is supported for accurately reading the payload.
|
function
|
selectOne(id, fields) {
const { tableName, primaryKey } = this;
const selectedFields = fields.join('","');
const sql = `SELECT "${selectedFields}" FROM "${tableName}" WHERE "${primaryKey}" = ? LIMIT 1`;
    return new Promise((resolve, reject) => {
this.connection.get(sql, [id], (error, row) => {
error !== null ? reject(error) : resolve(row);
});
});
}
|
javascript
| 16
| 0.577889
| 99
| 38.9
| 10
|
/**
* Select record by a specific ID in the Database
* @param {Number} id ID of the record
* @param {Array} fields A list of fields in table
 * @returns {Promise<Object>} Resolves with the matching row
*/
|
function
|
@VisibleForTesting
static int calculateHorizontalSpacerWidthForLandscape(
@NonNull FingerprintSensorPropertiesInternal sensorProperties, int displayWidthPx,
int dialogMarginPx, int navbarHorizontalInsetPx) {
final int sensorDistanceFromEdge = displayWidthPx
- sensorProperties.sensorLocationY
- sensorProperties.sensorRadius;
final int horizontalPadding = sensorDistanceFromEdge
- dialogMarginPx
- navbarHorizontalInsetPx;
Log.d(TAG, "Display width: " + displayWidthPx
+ ", Distance from edge: " + sensorDistanceFromEdge
+ ", Dialog margin: " + dialogMarginPx
+ ", Navbar horizontal inset: " + navbarHorizontalInsetPx
+ ", Horizontal spacer width (landscape): " + horizontalPadding);
return horizontalPadding;
}
|
java
| 16
| 0.646733
| 94
| 52.176471
| 17
|
/**
 * For devices in landscape orientation where the sensor is too far to the left/right, calculates the
* amount of padding necessary to center the biometric icon within the sensor's physical
* location.
*/
|
function
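The padding arithmetic above, traced with illustrative numbers (all values hypothetical):
display_width_px = 2340
sensor_location_y = 2000  # sensor center along the long axis
sensor_radius = 100
dialog_margin_px = 48
navbar_inset_px = 84

sensor_distance_from_edge = display_width_px - sensor_location_y - sensor_radius
horizontal_padding = sensor_distance_from_edge - dialog_margin_px - navbar_inset_px
print(horizontal_padding)  # 108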
|
@Test
public void useCase3() throws MessageException, CertificateException, VectorException, CryptoException {
final int psid = 0xcafe;
IEEE1609p2Message.setSelfCertificateFriendlyName("Client");
Certificate clientCert = CertificateManager.get("Client");
assertNotNull(clientCert.getEncryptionPrivateKey());
assertNotNull(clientCert.getSigningPrivateKey());
byte[] recipient = getSdcRecipient();
CryptoProvider cryptoProvider = new CryptoProvider();
IEEE1609p2Message msgSend = new IEEE1609p2Message(cryptoProvider);
msgSend.setPSID(psid);
byte[] signedServiceRequest = msgSend.sign(serviceRequest);
byte[] signedVehSitData = msgSend.sign(vehSitData, false);
byte[] encryptedVehSitData = msgSend.encrypt(vehSitData, recipient);
byte[] clientCertID8 = clientCert.getCertID8();
CertificateManager.remove("Client");
assertNull(CertificateManager.get(clientCertID8));
IEEE1609p2Message.setSelfCertificateFriendlyName("Self");
IEEE1609p2Message msgRecv = IEEE1609p2Message.parse(signedServiceRequest, cryptoProvider);
byte[] recvServiceRequest = msgRecv.getPayload();
assertTrue(Arrays.equals(serviceRequest, recvServiceRequest));
assertEquals(psid, (int)msgRecv.getPSID());
assertEquals(MsgSignerIDType.Certificate.getValue(), msgRecv.getSignerIDType().getValue());
clientCert = CertificateManager.get(clientCertID8);
assertNotNull(clientCert);
assertNull(clientCert.getEncryptionPrivateKey());
assertNull(clientCert.getSigningPrivateKey());
msgRecv = IEEE1609p2Message.parse(signedVehSitData, cryptoProvider);
assertEquals(MsgSignerIDType.DigestEcdsap256.getValue(), msgRecv.getSignerIDType().getValue());
byte[] recvVehSitData = msgRecv.getPayload();
assertTrue(Arrays.equals(vehSitData, recvVehSitData));
msgRecv = IEEE1609p2Message.parse(encryptedVehSitData, cryptoProvider);
assertEquals(MsgSignerIDType.DigestEcdsap256.getValue(), msgRecv.getSignerIDType().getValue());
recvVehSitData = msgRecv.getPayload();
assertTrue(Arrays.equals(vehSitData, recvVehSitData));
long generationTime = msgRecv.getGenerationTime().getTime();
assertTrue(generationTime < new Date().getTime());
}
|
java
| 10
| 0.800558
| 105
| 55.631579
| 38
|
/**
* Use case 3: SDC/SDW receives signed or encrypted messages from a client
* @throws MessageException if message is invalid
 * @throws CertificateException if recipient certificate cannot be found
* @throws VectorException if vector of recipient was not encoded properly
* @throws CryptoException if symmetric encryption fails
*/
|
function
|
def parse_continue_result(result) -> tuple:
result_code = None
result_message = None
if isinstance(result, int):
result_code = result
elif not isinstance(result, str):
result_len = len(result)
if result_len > 0:
result_code = result[0]
if result_len > 1:
result_message = result[1]
return (result_code, result_message)
|
python
| 11
| 0.53653
| 44
| 35.583333
| 12
|
Parses the result of calling transformer.check_continue and returns
the code and/or message
Arguments:
result: the result from calling transformer.check_continue
Return:
A tuple containing the result code and result message. One or both of these
values in the tuple may be None
Notes:
    A string parameter will always return a result code of None and a message of None,
    indicating the caller needs to decide what to do.
    An integer parameter will produce a result message of None; the caller needs to
    decide what an appropriate message is.
A parameter that's iterable with a length > 0 will have the first value as the result code and the
second value as the result message. No checks are made for type conformity.
If the parameter is something other than the above, an exception will most likely be thrown.
|
function
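With the function above in scope, each accepted input shape behaves as the notes describe:
print(parse_continue_result(0))             # (0, None)    int: code only
print(parse_continue_result("keep going"))  # (None, None) str: caller decides
print(parse_continue_result((1, "done")))   # (1, 'done')  iterable: code and message
print(parse_continue_result(()))            # (None, None) empty iterable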
|
public bool CanHandleException()
{
foreach (ParameterInfo pi in this._methodInfo.GetParameters())
{
if (pi.ParameterType == typeof(Exception))
{
return true;
}
}
return false;
}
|
c#
| 10
| 0.427184
| 74
| 27.181818
| 11
|
/// <summary>
/// Determines whether the step definition can handle an exception.
/// </summary>
/// <returns>
/// <c>true</c> if the step can handler an exception; otherwise, <c>false</c>.
/// </returns>
|
function
|
@Override
public void createPartControl(final Composite parent) {
final GridLayout gridLayout = new GridLayout();
gridLayout.marginWidth = 0;
gridLayout.marginHeight = 0;
parent.setLayout(gridLayout);
counterComposite = createProgressCountPanel(parent);
counterComposite.setLayoutData(new GridData(GridData.GRAB_HORIZONTAL
| GridData.FILL_BOTH));
benchViewer = new BenchViewer(counterComposite, this);
}
|
java
| 10
| 0.687371
| 76
| 43
| 11
|
/**
 * This method creates every internal part of the view, like the progress bar or
* the bench viewer.
*
* @see org.eclipse.ui.part.WorkbenchPart#createPartControl(org.eclipse.swt.widgets.Composite)
* @param parent
* The composite of the parent.
*/
|
function
|
public static Vector3 interpolateCatmullRom(float u, float T, Vector3 p0, Vector3 p1, Vector3 p2, Vector3 p3) {
double x, y, z;
x = interpolateCatmullRom(u, T, p0.getX(), p1.getX(), p2.getX(), p3.getX());
y = interpolateCatmullRom(u, T, p0.getY(), p1.getY(), p2.getY(), p3.getY());
z = interpolateCatmullRom(u, T, p0.getZ(), p1.getZ(), p2.getZ(), p3.getZ());
return new Vector3(x, y, z);
}
|
java
| 9
| 0.600462
| 111
| 61
| 7
|
/**
 * Interpolate a spline between at least 4 control points following the
 * Catmull-Rom equation. The interpolation matrix is:
 *   m = [  0.0   1.0    0.0   0.0 ]
 *       [ -T     0.0    T     0.0 ]
 *       [  2T    T-3    3-2T  -T  ]
 *       [ -T     2-T    T-2   T   ]
 * where T is the tension of the curve. The result is a value between p1 and
 * p2: t=0 gives p1, t=1 gives p2.
* @param u value from 0 to 1
* @param T The tension of the curve
* @param p0 control point 0
* @param p1 control point 1
* @param p2 control point 2
* @param p3 control point 3
* @return catmull-Rom interpolation
*/
|
function
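A scalar Python rendering of the matrix above: expanding [1, u, u^2, u^3] * m * [p0, p1, p2, p3] gives the coefficients below, and the endpoint checks confirm u=0 yields p1 and u=1 yields p2.
def catmull_rom(u, T, p0, p1, p2, p3):
    # coefficients read off the rows of the interpolation matrix
    c0 = p1
    c1 = -T * p0 + T * p2
    c2 = 2 * T * p0 + (T - 3) * p1 + (3 - 2 * T) * p2 - T * p3
    c3 = -T * p0 + (2 - T) * p1 + (T - 2) * p2 + T * p3
    return ((c3 * u + c2) * u + c1) * u + c0  # Horner evaluation

print(catmull_rom(0.0, 0.5, 0, 1, 2, 3))  # 1.0 (u=0 -> p1)
print(catmull_rom(1.0, 0.5, 0, 1, 2, 3))  # 2.0 (u=1 -> p2)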
|
class EventBinding:
"""
A configured event binding on action.
event is the event name
call is a callable that will be invoked with event *args, **kwargs.
processor is used to process the event before invoking the callable.
If processor returns False, callable will not be invoked.
If processor returns a 2-tuple, it will use those as *args, **kwargs to invoke the method.
"""
def __init__(self, event: Union[Callable, str], call: Callable = None, processor: Callable = None):
self.event = event
self.call = call
self.processor = processor
|
python
| 10
| 0.67608
| 103
| 34.470588
| 17
|
A configured event binding on action.
event is the event name
call is a callable that will be invoked with event *args, **kwargs.
processor is used to process the event before invoking the callable.
If processor returns False, callable will not be invoked.
If processor returns a 2-tuple, it will use those as *args, **kwargs to invoke the method.
|
class
|
def pad_zeros(self, signals):
batch_size, sig_len = signals.shape
self.hop_size = self.win_len // 2
rest = self.win_len - (self.hop_size + sig_len % self.win_len) % self.win_len
if rest > 0:
pad = torch.zeros(batch_size, rest).type(signals.type())
signals = torch.cat([signals, pad], 1)
pad_aux = torch.zeros(batch_size, self.hop_size).type(signals.type())
signals = torch.cat([pad_aux, signals, pad_aux], 1)
return signals, rest
|
python
| 11
| 0.587426
| 85
| 50
| 10
|
Pad a batch of signals with zeros before encoding.
Parameters:
signals (tensor): A batch of 1D signals in shape `[B, T]`, where `B` is
the batch size, `T` is the maximum length of the `B` signals.
Returns:
signals (tensor): A batch of padded signals and the length of zeros used for
padding. (in shape `[B, T]`)
        rest (int): the number of padding zeros appended at the end of the signal
|
function
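A pure-Python trace of the padding arithmetic above, showing how the computed rest plus the half-window of zeros on each side makes the total length hop-aligned:
def pad_lengths(sig_len, win_len):
    hop_size = win_len // 2
    rest = win_len - (hop_size + sig_len % win_len) % win_len
    total = hop_size + sig_len + rest + hop_size  # final padded length
    return rest, total

for sig_len in (10, 11, 12):
    print(sig_len, pad_lengths(sig_len, 4))
# 10 (4, 18)   11 (3, 18)   12 (2, 18)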
|
public class AuthOperationFilter : IOperationFilter
{
public void Apply(OpenApiOperation operation, OperationFilterContext context)
{
var isAuthorized = context.MethodInfo.DeclaringType != null
&& (context.MethodInfo.DeclaringType.GetCustomAttributes(true).OfType<AuthorizeAttribute>().Any()
|| context.MethodInfo.GetCustomAttributes(true).OfType<AuthorizeAttribute>().Any())
&& !context.MethodInfo.GetCustomAttributes(true).OfType<AllowAnonymousAttribute>().Any();
if (!isAuthorized)
{
return;
}
operation.Responses.TryAdd("401", new OpenApiResponse {Description = "Unauthorized"});
operation.Responses.TryAdd("403", new OpenApiResponse {Description = "Forbidden"});
var jwtBearerSchema = new OpenApiSecurityScheme
{
Reference = new OpenApiReference
{
Type = ReferenceType.SecurityScheme,
Id = "bearer"
}
};
operation.Security = new List<OpenApiSecurityRequirement>
{
new OpenApiSecurityRequirement
{
[jwtBearerSchema] = new string[] { }
}
};
}
}
|
c#
| 21
| 0.550405
| 118
| 42.870968
| 31
|
/// <summary>
/// A custom authorization filter to include the padlock icon on protected API endpoints.
/// </summary>
|
class
|
pub fn notify_one(&mut self, thread : &ThreadContext) {
// assure we hold the monitor
assert!(self.is_locked_by_thread(thread.get_tid()));
let mut i = 0;
let len = self.waiters.len();
while i < len {
match self.waiters_prio[i] {
(false, tid, lock_count) => {
self.waiters_prio[i] = (true, tid, lock_count);
return;
},
_ => ()
}
i += 1;
}
}
|
rust
| 15
| 0.561358
| 55
| 23
| 16
|
// ----------------------------------------------
// Notify one wait()ing thread, if any. The corresponding
// thread is unblocked and resumes operation. It automatically
// locks the mutex again.
//
// The monitor must be locked by the current thread.
|
function
|
BOOL
FindLspEntries(
PROVIDER **lspProviders,
int *lspProviderCount,
int *lpErrno
)
{
PROVIDER *Providers = NULL;
LPWSAPROTOCOL_INFOW ProtocolInfo = NULL;
DWORD DummyLspId = 0;
int ProtocolCount = 0,
LayerCount = 0,
idx,
i, j;
*lspProviderCount = 0;
*lspProviders = NULL;
ProtocolInfo = EnumerateProviders( LspCatalogBoth, &ProtocolCount );
if ( NULL == ProtocolInfo )
{
dbgprint("FindLspEntries; EnumerateProviders failed!");
goto cleanup;
}
DummyLspId = 0;
for(i=0; i < ProtocolCount ;i++)
{
if ( 0 == memcmp( &ProtocolInfo[ i ].ProviderId, &gProviderGuid, sizeof( gProviderGuid ) ) )
{
DummyLspId = ProtocolInfo[ i ].dwCatalogEntryId;
break;
}
}
ASSERT( 0 != DummyLspId );
LayerCount = 0;
for(i=0; i < ProtocolCount ;i++)
{
if ( ( ProtocolInfo[ i ].ProtocolChain.ChainLen > 1 ) &&
( DummyLspId == ProtocolInfo[ i ].ProtocolChain.ChainEntries[ 0 ] )
)
{
LayerCount++;
}
}
ASSERT( 0 != LayerCount );
Providers = (PROVIDER *) LspAlloc( sizeof(PROVIDER) * LayerCount, lpErrno );
if ( NULL == Providers )
{
dbgprint("FindLspEntries: LspAlloc failed: %d", *lpErrno );
goto cleanup;
}
idx = 0;
for(i=0; i < ProtocolCount ;i++)
{
if ( ( ProtocolInfo[ i ].ProtocolChain.ChainLen > 1 ) &&
( DummyLspId == ProtocolInfo[ i ].ProtocolChain.ChainEntries[ 0 ] )
)
{
memcpy( &Providers[ idx ].LayerProvider, &ProtocolInfo[ i ], sizeof(WSAPROTOCOL_INFOW) );
Providers[ idx ].LayerProvider.szProtocol[ WSAPROTOCOL_LEN ] = '\0';
for(j=0; j < ProtocolCount ;j++)
{
if ( ProtocolInfo[ i ].ProtocolChain.ChainEntries[ 1 ] ==
ProtocolInfo[ j ].dwCatalogEntryId )
{
memcpy( &Providers[ idx ].NextProvider, &ProtocolInfo[ j ],
sizeof( WSAPROTOCOL_INFOW ) );
Providers[ idx ].NextProvider.szProtocol[ WSAPROTOCOL_LEN ] = '\0';
break;
}
}
ASSERT( j < ProtocolCount );
InitializeCriticalSection( &Providers[ idx ].ProviderCritSec );
InitializeListHead( &Providers[ idx ].SocketList );
Providers[ idx ].LspDummyId = DummyLspId;
idx++;
}
}
ASSERT( idx == LayerCount );
if ( NULL != ProtocolInfo )
FreeProviders( ProtocolInfo );
*lspProviders = Providers;
*lspProviderCount = LayerCount;
return TRUE;
cleanup:
if ( NULL != ProtocolInfo )
FreeProviders( ProtocolInfo );
if ( NULL != Providers )
LspFree( Providers );
return FALSE;
}
|
c++
| 18
| 0.519199
| 101
| 32.662921
| 89
|
//
// Function: FindLspEntries
//
// Description:
// This function searches the Winsock catalog and builds an array of the
// WSAPROTOCOL_INFOW structures which belong to the LSP. This includes
// all layered protocol chains as well as the LSP dummy (hidden) entry.
// The function first finds the dummy entry using 'gProviderGuid' which
// is the global GUID of the dummy entry. From there, the catalog is
// searched for all entries whose first entry in the protocol chain
// matches the dummy entry's catalog ID.
//
|
function
|
public sealed class DataSource : IDataSource
{
#region Variables
private static readonly Regex _regex = new Regex(@"(?<method>.*(?=\()){1}\((?<args>.+?(,\s?|\)))*;?");
private readonly Lazy<IHelperService> _helperService;
private readonly Lazy<ISubscriptionService> _subscriptionService;
private readonly Lazy<IUserService> _userService;
private readonly Lazy<IAccountService> _accountService;
private readonly Lazy<ICalendarService> _calendarService;
private readonly Lazy<IParticipantService> _participantService;
private readonly Lazy<IEventService> _eventService;
private readonly Lazy<IActivityService> _activityService;
private readonly Lazy<IOpeningService> _openingService;
private readonly Lazy<IScheduleService> _scheduleService;
#endregion
#region Properties
internal CoEventContext Context { get; }
public IMapper Mapper { get; }
public IPrincipal Principal { get; }
internal List<Helpers.OpeningAction> Actions { get; } = new List<Helpers.OpeningAction>();
#region Services
public IHelperService Helper { get { return _helperService.Value; } }
public ISubscriptionService Subscriptions { get { return _subscriptionService.Value; } }
public IUserService Users { get { return _userService.Value; } }
public IAccountService Accounts { get { return _accountService.Value; } }
public ICalendarService Calendars { get { return _calendarService.Value; } }
public IParticipantService Participants { get { return _participantService.Value; } }
public IEventService Events { get { return _eventService.Value; } }
public IActivityService Activities { get { return _activityService.Value; } }
public IOpeningService Openings { get { return _openingService.Value; } }
public IScheduleService Schedules { get { return _scheduleService.Value; } }
#endregion
#endregion
#region Constructors
DataSource(IPrincipalAccessor principalAccessor, IMapper mapper, ModelProfile profile)
{
if (principalAccessor == null) throw new ArgumentNullException(nameof(principalAccessor));
this.Principal = principalAccessor.Principal;
profile.BindDataSource(this);
this.Mapper = mapper;
_helperService = new Lazy<IHelperService>(() => new HelperService(this));
_subscriptionService = new Lazy<ISubscriptionService>(() => new SubscriptionService(this));
_userService = new Lazy<IUserService>(() => new UserService(this));
_accountService = new Lazy<IAccountService>(() => new AccountService(this));
_calendarService = new Lazy<ICalendarService>(() => new CalendarService(this));
_participantService = new Lazy<IParticipantService>(() => new ParticipantService(this));
_eventService = new Lazy<IEventService>(() => new EventService(this));
_activityService = new Lazy<IActivityService>(() => new ActivityService(this));
_openingService = new Lazy<IOpeningService>(() => new OpeningService(this));
_scheduleService = new Lazy<IScheduleService>(() => new ScheduleService(this));
}
public DataSource(DbContextOptions<CoEventContext> options, IPrincipalAccessor principalAccessor, IMapper mapper, ModelProfile profile) : this(principalAccessor, mapper, profile)
{
this.Context = new CoEventContext(options);
}
#endregion
#region Methods
public void Migrate()
{
this.Context.Database.Migrate();
}
public bool EnsureCreated()
{
return this.Context.Database.EnsureCreated();
}
public bool EnsureDeleted()
{
return this.Context.Database.EnsureDeleted();
}
public int Commit()
{
var result = this.Context.SaveChanges();
Sync();
return result;
}
public int CommitTransaction()
{
return this.CommitTransaction(this.Context.SaveChanges);
}
public int CommitTransaction(Func<int> action)
{
var success = false;
var actions = this.Actions.Count() > 0;
int result;
using (var transaction = this.Context.Database.BeginTransaction())
{
try
{
result = action?.Invoke() ?? this.Context.SaveChanges();
transaction.Commit();
if (!actions) Sync();
success = true;
}
catch (DbUpdateException)
{
transaction.Rollback();
throw;
}
}
if (success && actions)
{
using (var transaction = this.Context.Database.BeginTransaction())
{
try
{
this.Actions.ForEach(a => PerformAction(a.Process, a.OpeningParticipant));
this.Context.SaveChanges();
transaction.Commit();
Sync();
success = true;
}
catch (DbUpdateException)
{
transaction.Rollback();
throw;
}
finally
{
this.Actions.Clear();
}
}
}
return result;
}
public void CommitTransaction(Action action)
{
this.CommitTransaction(() => { action(); return 0; });
}
private void PerformAction(Entities.Process process, Entities.OpeningParticipant openingParticipant)
{
var matches = _regex.Matches(process.Action);
var opening = process.Opening;
foreach (Match match in matches)
{
var method = match.Groups["method"];
switch (method.Value)
{
case ("Add"):
{
var source = match.Groups["args"]?.Captures[0]?.Value?.Replace(", ", "");
var destination = match.Groups["args"]?.Captures[1]?.Value?.Replace(", ", "");
var condition = match.Groups["args"]?.Captures[2]?.Value?.Replace(")", "");
if (source == "Participant.Answers")
{
if (!String.IsNullOrWhiteSpace(condition))
{
if (condition.StartsWith("Question.Caption"))
{
var cval = condition.Substring(18, condition.Length - 19);
var answers = (
from p in this.Context.Processes
join oq in this.Context.OpeningQuestions on p.OpeningId equals oq.OpeningId
join op in this.Context.OpeningParticipants on p.OpeningId equals op.OpeningId
join oan in this.Context.OpeningAnswers on new { p.OpeningId, op.ParticipantId, oq.QuestionId } equals new { oan.OpeningId, oan.ParticipantId, oan.QuestionId }
where p.Id == process.Id
&& op.ParticipantId == openingParticipant.ParticipantId
&& oq.Question.Caption == cval
select oan.Text);
answers.ForEach(a =>
{
var tag = new Entities.OpeningTag(opening, cval, a);
opening.Tags.Add(tag);
this.Context.OpeningTags.Add(tag);
});
}
}
}
}
break;
case ("Delete"):
{
var source = match.Groups["args"]?.Captures[0]?.Value?.Replace(", ", "");
var condition = match.Groups["args"]?.Captures[1]?.Value?.Replace(")", "");
if (source == "Opening.Tags")
{
if (!String.IsNullOrWhiteSpace(condition))
{
if (condition.StartsWith("Tag.Key"))
{
var cval = condition.Substring(9, condition.Length - 10);
var tags = this.Context.OpeningTags.Where(ot => ot.OpeningId == opening.Id && ot.Key == cval).ToArray();
tags.ForEach(t =>
{
opening.Tags.Remove(t);
this.Context.OpeningTags.Remove(t);
});
}
}
}
}
break;
}
match.NextMatch();
}
}
private void Sync()
{
var type = this.GetType();
var gtype = typeof(Lazy<>);
var fields = type.GetFields(BindingFlags.Instance | BindingFlags.NonPublic).Where(f => f.FieldType.IsGenericType && f.FieldType.GetGenericTypeDefinition() == gtype).ToArray();
foreach (var field in fields)
{
var stype = field.FieldType.GetGenericArguments()[0];
var gstype = gtype.MakeGenericType(stype);
var lazyService = field.GetValue(this);
var isValueCreatedProp = gstype.GetProperty(nameof(Lazy<object>.IsValueCreated));
var valueProp = gstype.GetProperty(nameof(Lazy<object>.Value));
if ((bool)isValueCreatedProp.GetValue(lazyService))
{
var service = valueProp.GetValue(lazyService);
var sync = service.GetType().GetMethod("Sync", BindingFlags.Instance | BindingFlags.NonPublic);
if (sync != null)
{
sync.Invoke(service, null);
}
}
}
}
#endregion
}
|
c#
| 30
| 0.486107
| 203
| 49.227273
| 220
|
// TODO: Control tracking
// TODO: Control size of models (read and write models)
/// <summary>
/// DataSource sealed class, provides a way to interact with the datasource.
/// </summary>
|
class
|
[SuppressMessage(
"Menees.Analyzers",
"MEN005",
Justification = "Many comments.")]
public class UserManager : IUserManager
{
private static readonly IHashGeneratorOptions
DefaultHashGeneratorOptions = new Pbkdf2GeneratorOptions(1500);
private static readonly HashGenerator DefaultHashGenerator
= new Pbkdf2Generator(
(Pbkdf2GeneratorOptions)DefaultHashGeneratorOptions);
private readonly IConfiguration cfg;
private readonly ILogger logger;
private ProntoMiaDbContext dbContext;
public UserManager(
ProntoMiaDbContext dbContext,
IConfiguration cfg,
ILogger<UserManager> logger)
{
this.dbContext = dbContext;
this.cfg = cfg;
this.logger = logger;
}
private int MinPasswordLength =>
this.cfg.GetValue<int>("User:MIN_PASSWORD_LENGTH");
private string SigningKey =>
this.cfg.GetValue<string>("JWT:SIGNING_KEY");
private string Issuer =>
this.cfg.GetValue<string>("JWT:ISSUER");
private string Audience =>
this.cfg.GetValue<string>("JWT:AUDIENCE");
private int ValidForDays =>
this.cfg.GetValue<int>("JWT:VALID_FOR_DAYS");
public void SetDbContext(ProntoMiaDbContext context)
{
this.dbContext = context;
}
public async Task<string> Authenticate(
string userName, string password)
{
var user = await this.GetByUserName(userName);
if (user == default)
{
throw DataAccess.Error.UserNotFound.AsQueryException();
}
await this.CheckPassword(password, user);
this.logger.LogDebug(
"User {UserName} has been authenticated", userName);
return this.GenerateToken(user);
}
public async Task<string> ChangePassword(
int userId, string oldPassword, string newPassword)
{
var user = await this.GetById(userId);
await this.CheckPassword(oldPassword, user);
this.UpdatePassword(newPassword, user);
this.dbContext.Update(user);
await this.dbContext.SaveChangesAsync();
return this.GenerateToken(user);
}
public async Task<User?>
GetByUserName(string userName)
{
var user = await this.dbContext.Users.SingleOrDefaultAsync(
u => u.UserName == userName);
if (user == default)
{
this.logger.LogWarning(
"Invalid username {UserName}", userName);
}
return user;
}
public IQueryable<User> GetAll()
{
return this.dbContext.Users;
}
public async Task<User> Create(
string userName, string password)
{
var checkUserName = await this.GetByUserName(userName);
if (checkUserName != default)
{
throw DataAccess.Error.UserAlreadyExists.AsQueryException();
}
this.CheckPasswordPolicy(password);
var user = new User(
userName,
DefaultHashGenerator.HashPassword(password),
DefaultHashGenerator.GetIdentifier(),
DefaultHashGenerator.GetOptions().ToJson());
this.dbContext.Users.Add(user);
await this.dbContext.SaveChangesAsync();
return user;
}
public async Task<User> Update(
int id, string? userName, string? password)
{
var user = await this.GetById(id);
if (userName == null && password == null)
{
return user;
}
if (userName != null)
{
await this.UpdateUserName(userName, user);
}
if (password != null)
{
this.UpdatePassword(password, user);
}
this.dbContext.Users.Update(user);
await this.dbContext.SaveChangesAsync();
return user;
}
public async Task<int>
Remove(int id)
{
var user = await this.GetById(id);
this.dbContext.Remove(user);
await this.dbContext.SaveChangesAsync();
return id;
}
public async Task<User> GetById(int id)
{
var user = await this.dbContext.Users
.SingleOrDefaultAsync(u => u.Id == id);
if (user != default)
{
return user;
}
this.logger.LogWarning(
"Invalid user id {Id}", id);
throw DataAccess.Error.UserNotFound
.AsQueryException();
}
private string GenerateToken(User user)
{
var tokenHandler = new JwtSecurityTokenHandler();
var key = new SymmetricSecurityKey(
Encoding.UTF8.GetBytes(this.SigningKey));
var credentials = new SigningCredentials(
key, SecurityAlgorithms.HmacSha512);
var claims = new[]
{
new Claim(
ClaimTypes.NameIdentifier, user.Id.ToString()),
new Claim(ClaimTypes.Name, user.UserName),
};
var token = new JwtSecurityToken(
this.Issuer,
this.Audience,
claims,
expires: DateTime.UtcNow.AddDays(this.ValidForDays),
signingCredentials: credentials);
var tokenString = tokenHandler.WriteToken(token);
this.logger.LogDebug(
"Token for user {UserName} has been created",
user.UserName);
return tokenString;
}
private async Task UpdateHash(string password, User user)
{
user.PasswordHash = DefaultHashGenerator.HashPassword(password);
user.HashGenerator = DefaultHashGenerator.GetIdentifier();
user.HashGeneratorOptions =
DefaultHashGenerator.GetOptions().ToJson();
this.dbContext.Users.Update(user);
await this.dbContext.SaveChangesAsync();
this.logger.LogDebug(
"Password-hash for User {UserName} has been updated",
user.UserName);
}
private void CheckPasswordPolicy(string password)
{
var checkPassword = PasswordHelper
.PasswordPolicyMet(password, this.MinPasswordLength);
if (checkPassword != PasswordHelper.PasswordPolicyViolation.None)
{
var arguments = new Dictionary<string, string>
{
["minLength"] = this.MinPasswordLength.ToString(),
["passwordPolicyViolation"] = checkPassword.ToString(),
};
throw DataAccess.Error.PasswordTooWeak
.AsQueryException(arguments);
}
}
private async Task UpdateUserName(string userName, User user)
{
var userCheck = await this.GetByUserName(userName);
if (userCheck != default)
{
throw DataAccess.Error.UserAlreadyExists.AsQueryException();
}
user.UserName = userName;
}
private void UpdatePassword(string password, User user)
{
this.CheckPasswordPolicy(password);
var generator = HashGeneratorFactory.GetGeneratorForUser(user);
user.PasswordHash = generator.HashPassword(password);
}
private async Task CheckPassword(string password, User user)
{
var hashGenerator = HashGeneratorFactory.GetGeneratorForUser(user);
if (!hashGenerator.ValidatePassword(
password, user.PasswordHash))
{
this.logger.LogWarning(
"Invalid password for user {UserName}", user.UserName);
throw DataAccess.Error.WrongPassword.AsQueryException();
}
if (hashGenerator.GetIdentifier() !=
DefaultHashGenerator.GetIdentifier())
{
await this.UpdateHash(password, user);
}
}
}
|
c#
| 18
| 0.549616
| 79
| 37.611872
| 219
|
/// <summary>
/// Class responsible for the lifecycle of a user within the application.
/// </summary>
|
class
|
def compute_statistics(self, mean: Optional[Tensor] = None, std: Optional[Tensor] = None) -> Tuple[Tensor, Tensor]:
if mean is None:
masked_data = self.source_data[self.source_mask]
mean = masked_data.mean(0)
std = masked_data.std(0)
std[std < 1e-5] = 1
mean[~self.event_info.normalized_features] = 0
std[~self.event_info.normalized_features] = 1
self.mean = mean
self.std = std
return mean, std
|
python
| 11
| 0.573146
| 115
| 44.454545
| 11
|
Compute the mean and standard deviation of features with normalization enabled in the event file.
Parameters
----------
mean: Tensor, optional
std: Tensor, optional
Give existing values for mean and standard deviation to set this
dataset's statistics to those values. This is especially useful for
normalizing the validation and testing datasets with training statistics.
Returns
-------
(Tensor, Tensor)
The new mean and standard deviation for this dataset.
|
function
|
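A minimal usage sketch of the statistics-sharing pattern described above, assuming PyTorch; train_data, train_mask and val_data are placeholder tensors standing in for the dataset's source_data/source_mask, not names from the original class:

import torch

# Placeholder tensors standing in for the dataset's source_data/source_mask.
train_data = torch.randn(100, 4, 8)       # (events, objects, features)
train_mask = torch.rand(100, 4) > 0.2     # which objects are real

masked = train_data[train_mask]           # flatten to (num_real_objects, features)
mean, std = masked.mean(0), masked.std(0)
std[std < 1e-5] = 1                       # guard against near-zero variance

# Reuse the *training* statistics to normalize a validation tensor.
val_data = torch.randn(20, 4, 8)
val_normalized = (val_data - mean) / std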
public IPagedList GetPagedList(string search, int pageNumber, int recordsPerPage)
{
IPagedList exts = db.ThingExtensions
.Where(e => search == null || e.Title.Contains(search))
.OrderBy(e => e.Title).ToList()
.ToPagedList(pageNumber, recordsPerPage);
return exts;
}
|
c#
| 22
| 0.582133
| 81
| 42.5
| 8
|
/// <summary>
/// Get paged list of defined Thing Extensions
/// </summary>
/// <param name="search">Search text matched against the 'Title' field</param>
/// <param name="pageNumber">Paging page number.</param>
/// <param name="recordsPerPage">Paging items per page.</param>
/// <returns>Paged list of defined Thing Extensions</returns>
|
function
|
@Override
public CompletableFuture<DialogTurnResult> continueDialog(DialogContext dc) {
if (dc == null) {
return Async.completeExceptionally(new IllegalArgumentException("dc cannot be null."));
}
if (!dc.getContext().getActivity().isType(ActivityTypes.MESSAGE)) {
return CompletableFuture.completedFuture(Dialog.END_OF_TURN);
}
DialogInstance instance = dc.getActiveDialog();
Map<String, Object> state = (Map<String, Object>) instance.getState().get(PERSISTED_STATE);
PromptOptions options = (PromptOptions) instance.getState().get(PERSISTED_OPTIONS);
return onRecognize(dc.getContext(), state, options).thenCompose(recognized -> {
state.put(ATTEMPTCOUNTKEY, (int) state.get(ATTEMPTCOUNTKEY) + 1);
return validateContext(dc, state, options, recognized).thenCompose(isValid -> {
if (isValid) {
return dc.endDialog(recognized.getValue());
}
if (!dc.getContext().getResponded()) {
return onPrompt(dc.getContext(), state, options, true).thenApply(result -> Dialog.END_OF_TURN);
}
return CompletableFuture.completedFuture(Dialog.END_OF_TURN);
});
});
}
|
java
| 21
| 0.616679
| 115
| 53.5
| 24
|
/**
* Called when a prompt dialog is the active dialog and the user replied with a
* new activity.
*
* @param dc The dialog context for the current turn of conversation.
*
* @return A {@link CompletableFuture} representing the asynchronous operation.
*
* If the task is successful, the result indicates whether the dialog is
* still active after the turn has been processed by the dialog. The
* prompt generally continues to receive the user's replies until it
* accepts the user's reply as valid input for the prompt.
*/
|
function
|
protected Collection<Integer> GetResMatch(final TemporalLevel level, final int lbl)
{
final int idx = level.res_findLabel(lbl);
if (idx != -1)
{
return (level.m_res_match[idx]);
}
else
{
HashSet<Integer> tmp = new HashSet<>();
tmp.add(-1);
return (tmp);
}
}
|
java
| 10
| 0.624561
| 83
| 19.428571
| 14
|
/**
* Returns collection of indices of GT labels that matches with given RES lbl,
* or collection with single item (of -1 value) if no such label was found.
*/
|
function
|
func (client *Client) CreateSensorViaAppliance(ctx context.Context, sensor *Sensor, ip net.IP) error {
log.Printf("[DEBUG] sweeping dead sensors...")
if err := client.sweepSensors(); err != nil {
return err
}
time.Sleep(time.Second * 5)
activationCode := sensor.ActivationCode
if activationCode == "" {
log.Printf("[DEBUG] checking license...")
if ok, err := client.HasSensorKeyAvailability(); err != nil {
return err
} else if !ok {
return fmt.Errorf("the AlienVault license in use does not allow creation of more sensors")
}
log.Printf("[DEBUG] creating sensor key...")
var err error
key, err := client.CreateSensorKey()
if err != nil {
return err
}
defer func() {
_ = client.DeleteSensorKey(key)
}()
activationCode = key.ID
}
log.Printf("[DEBUG] waiting for appliance to be created at %s...", ip.String())
if err := client.waitForSensorApplianceCreation(ctx, ip); err != nil {
return err
}
log.Printf("[DEBUG] activating sensor appliance...")
if err := client.activateSensorAppliance(ctx, ip, sensor, activationCode); err != nil {
return err
}
time.Sleep(time.Second * 10)
log.Printf("[DEBUG] finding sensor to finish setup for...")
sensors, err := client.GetSensors()
if err != nil {
return err
}
count := 0
var createdSensor Sensor
for _, s := range sensors {
if s.SetupStatus != SensorSetupStatusComplete && s.Name == sensor.Name {
count++
if count > 1 {
return fmt.Errorf("failed to complete sensor setup as we found more than one sensor with the specified name being set up at the same time, and could differentiate between them")
}
createdSensor = s
}
}
if count == 0 {
return fmt.Errorf("no sensors found ready to be set up")
}
log.Printf("[DEBUG] completing setup...")
sensor.V1ID = createdSensor.V1ID
sensor.V2ID = createdSensor.V2ID
if err := client.completeSetup(&createdSensor); err != nil {
return err
}
log.Printf("[DEBUG] waiting for sensor to be live...")
return client.waitForSensorToBeReady(ctx, sensor)
}
|
go
| 14
| 0.687901
| 181
| 31.677419
| 62
|
// CreateSensorViaAppliance creates a new sensor via the sensor appliance referenced by the provided IP address
|
function
|
async function createProcessesHandlers() {
    process.on('uncaughtException', (err, origin) => {
        console.error(`Uncaught exception, origin: ${origin}`);
        outputUnexpectedError(err);
        process.exit(err.errno || 1);
    });
    process.on('unhandledRejection', (reason, promise) => {
        console.error('Unhandled Rejection at:', promise, 'reason:', reason);
        console.error(`Unexpected error, you should never be reading this, please inform us how this happened at: ${
            chalk.bold('https://github.com/Josee9988/Implode-CSS/issues')}`);
        outputUnexpectedError(reason);
        process.exit((reason && reason.errno) || 1);
    });
    process.on('warning', (warning) => {
        console.warn(`Warning found: ${warning}\n`);
        console.warn(warning.name);
        console.warn(warning.message);
        console.warn(warning.stack);
    });
}
|
javascript
| 16
| 0.626738
| 116
| 41.545455
| 22
|
/**
* Summary: a function that handles multiple events in order to try to avoid unexpected
* errors closing the program unexpectedly without the user knowing what happened.
 * Called once from the CLI file; it registers the event handlers.
*
* @async
* @return {void}
*/
|
function
|
@Override
@SuppressWarnings("deprecation")
public void generateReport(String path, ReportProgressPanel progressPanel) {
progressPanel.setIndeterminate(false);
progressPanel.start();
progressPanel.updateStatusLabel("Querying files...");
reportPath = path + "BodyFile.txt";
currentCase = Case.getCurrentCase();
skCase = currentCase.getSleuthkitCase();
try {
final String query = "type = " + TskData.TSK_DB_FILES_TYPE_ENUM.FS.getFileType()
+ " AND name != '.' AND name != '..'";
progressPanel.updateStatusLabel("Loading files...");
List<FsContent> fs = skCase.findFilesWhere(query);
String ingestwarning = "";
if (IngestManager.getDefault().isIngestRunning()) {
ingestwarning = "Warning, this report was run before ingest services completed!\n";
}
int size = fs.size();
progressPanel.setMaximumProgress(size/100);
BufferedWriter out = null;
try {
out = new BufferedWriter(new FileWriter(reportPath, true));
out.write(ingestwarning);
int count = 0;
for (FsContent file : fs) {
if (progressPanel.getStatus() == ReportStatus.CANCELED) {
break;
}
if(count++ == 100) {
progressPanel.increment();
progressPanel.updateStatusLabel("Now processing " + file.getName() + "...");
count = 0;
}
if(file.getMd5Hash()!=null) {
out.write(file.getMd5Hash());
}
out.write("|");
if(file.getUniquePath()!=null) {
out.write(file.getUniquePath());
}
out.write("|");
out.write(Long.toString(file.getMetaAddr()));
out.write("|");
String modeString = file.getModesAsString();
if(modeString != null) {
out.write(modeString);
}
out.write("|");
out.write(Long.toString(file.getUid()));
out.write("|");
out.write(Long.toString(file.getGid()));
out.write("|");
out.write(Long.toString(file.getSize()));
out.write("|");
out.write(Long.toString(file.getAtime()));
out.write("|");
out.write(Long.toString(file.getMtime()));
out.write("|");
out.write(Long.toString(file.getCtime()));
out.write("|");
out.write(Long.toString(file.getCrtime()));
out.write("\n");
}
} catch (IOException ex) {
logger.log(Level.WARNING, "Could not write the temp body file report.", ex);
            } finally {
                try {
                    if (out != null) {
                        out.flush();
                        out.close();
                    }
                } catch (IOException ex) {
                    logger.log(Level.WARNING, "Could not flush and close the BufferedWriter.", ex);
                }
            }
progressPanel.complete();
} catch(TskCoreException ex) {
logger.log(Level.WARNING, "Failed to get the unique path.", ex);
}
}
|
java
| 18
| 0.458739
| 100
| 44.56962
| 79
|
/**
* Generates a body file format report for use with the MAC time tool.
* @param path path to save the report
* @param progressPanel panel to update the report's progress
*/
|
function
|
bool
TextureMapLoader::makeResident(TextureMap* texture)
{
texture->setLastUsed(m_frameCount);
bool isResident = handleMakeResident(texture);
return isResident;
}
|
c++
| 7
| 0.764368
| 51
| 24
| 7
|
/** Start loading a texture; the texture may not be immediately available to use when
* rendering if the texture loader is asynchronous.
*
* \return true if the texture is ready for rendering
*/
|
function
|
func TestChainArbitratorRepublishCommitment(t *testing.T) {
t.Parallel()
tempPath, err := ioutil.TempDir("", "testdb")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(tempPath)
db, err := channeldb.Open(tempPath)
if err != nil {
t.Fatal(err)
}
defer db.Close()
const numChans = 10
var channels []*channeldb.OpenChannel
for i := 0; i < numChans; i++ {
lChannel, _, cleanup, err := lnwallet.CreateTestChannels(true)
if err != nil {
t.Fatal(err)
}
defer cleanup()
channel := lChannel.State()
channel.Db = db
addr := &net.TCPAddr{
IP: net.ParseIP("127.0.0.1"),
Port: 18556,
}
if err := channel.SyncPending(addr, 101); err != nil {
t.Fatal(err)
}
channels = append(channels, channel)
}
for i := 0; i < numChans/2; i++ {
closeTx := channels[i].FundingTxn.Copy()
closeTx.TxIn[0].PreviousOutPoint = channels[i].FundingOutpoint
err := channels[i].MarkCommitmentBroadcasted(closeTx)
if err != nil {
t.Fatal(err)
}
}
published := make(map[chainhash.Hash]struct{})
chainArbCfg := ChainArbitratorConfig{
ChainIO: &mockChainIO{},
Notifier: &mockNotifier{},
PublishTx: func(tx *wire.MsgTx) error {
published[tx.TxHash()] = struct{}{}
return nil
},
}
chainArb := NewChainArbitrator(
chainArbCfg, db,
)
if err := chainArb.Start(); err != nil {
t.Fatal(err)
}
defer func() {
if err := chainArb.Stop(); err != nil {
t.Fatal(err)
}
}()
if len(published) != numChans/2 {
t.Fatalf("expected %d re-published transactions, got %d",
numChans/2, len(published))
}
for i := 0; i < numChans/2; i++ {
closeTx := channels[i].FundingTxn.Copy()
closeTx.TxIn[0].PreviousOutPoint = channels[i].FundingOutpoint
_, ok := published[closeTx.TxHash()]
if !ok {
t.Fatalf("closing tx not re-published")
}
delete(published, closeTx.TxHash())
}
if len(published) != 0 {
t.Fatalf("unexpected tx published")
}
}
|
go
| 16
| 0.643497
| 64
| 24
| 76
|
// TestChainArbitratorRepublishCommitment tests that the chain arbitrator will
// republish closing transactions for channels marked CommitmentBroadcast in
// the database at startup.
|
function
|
def run_reachability_job(path_to_file, source_vertex, target_vertex, max_depth):
    toc = time.time()
    g = gl.load_graph(path_to_file, 'snap')
result = is_reachable(g, source_vertex, target_vertex, max_depth)
tic = time.time()
if result:
print("Vertex {} is reachable from vertex {}".format(source_vertex, target_vertex))
else:
print("Vertex {} cannot be reached from vertex {}".format(source_vertex, target_vertex))
return "Total runtime: {} seconds".format(tic-toc)
|
python
| 12
| 0.65873
| 96
| 49.5
| 10
|
Determines whether a target vertex is reachable from a source vertex on the specified graph using graphlab's API.
Parameters
----------
path_to_file : String type
The path leading to the edge list file
source_vertex : Long type
The id of the source vertex
target_vertex : Long type
The id of the target vertex
max_depth : int type
The maximum recursion depth
Returns
-------
runtime : String type
The total runtime of the job
|
function
|
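A hypothetical invocation of the job above; the edge-list path and vertex ids are placeholders, and graphlab (gl) plus is_reachable are assumed to be available as in the surrounding module:

if __name__ == "__main__":
    # Placeholder arguments; any SNAP-format edge list works here.
    runtime = run_reachability_job("edges.snap",
                                   source_vertex=0,
                                   target_vertex=42,
                                   max_depth=10)
    print(runtime)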
TEST_F(PrefixSetTest, EdgeCases) {
std::vector<SBPrefix> prefixes;
SBPrefix prefix = kHighBitSet;
prefixes.push_back(prefix);
unsigned delta = 100 * 1000 * 1000;
for (int i = 0; i < 10; ++i) {
prefix += delta;
prefixes.push_back(prefix);
}
delta = 256 * 256 - 100;
for (int i = 0; i < 200; ++i) {
prefix += delta;
prefixes.push_back(prefix);
prefixes.push_back(prefix);
delta++;
}
delta = 256 * 256 - 1;
prefix = kHighBitClear - delta * 1000;
prefixes.push_back(prefix);
for (int i = 0; i < 1000; ++i) {
prefix += delta;
prefixes.push_back(prefix);
delta--;
}
std::sort(prefixes.begin(), prefixes.end());
PrefixSetBuilder builder(prefixes);
std::unique_ptr<const PrefixSet> prefix_set = builder.GetPrefixSetNoHashes();
std::vector<SBPrefix> prefixes_copy;
prefix_set->GetPrefixes(&prefixes_copy);
prefixes.erase(std::unique(prefixes.begin(), prefixes.end()), prefixes.end());
EXPECT_EQ(prefixes_copy.size(), prefixes.size());
EXPECT_TRUE(
std::equal(prefixes.begin(), prefixes.end(), prefixes_copy.begin()));
EXPECT_FALSE(prefix_set->PrefixExists(kHighBitSet - 100));
EXPECT_FALSE(prefix_set->PrefixExists(kHighBitClear + 100));
for (size_t i = 0; i < prefixes.size(); ++i) {
EXPECT_TRUE(prefix_set->PrefixExists(prefixes[i]));
EXPECT_FALSE(prefix_set->PrefixExists(prefixes[i] - 1));
EXPECT_FALSE(prefix_set->PrefixExists(prefixes[i] + 1));
}
}
|
c++
| 13
| 0.653343
| 80
| 34.414634
| 41
|
// Use artificial inputs to test various edge cases in PrefixExists(). Items
// before the lowest item aren't present. Items after the largest item aren't
// present. Create a sequence of items with deltas above and below 2^16, and
// make sure they're all present. Create a very long sequence with deltas below
// 2^16 to test crossing |kMaxRun|.
|
function
|
pub const unsafe fn new_from_raw(prec: [[AffineStorage; 16]; 64]) -> Self {
Self {
prec,
blind: GEN_BLIND,
initial: GEN_INITIAL,
}
}
|
rust
| 6
| 0.473404
| 75
| 26
| 7
|
/// Create a new `ECMultGenContext` from raw values.
///
/// # Safety
/// The function is unsafe because an incorrect value of `prec` can lead to
/// crypto logic failure. You most likely do not want to use this function,
/// but `ECMultGenContext::new_boxed`.
|
function
|
@Override
public void writeReauthentication()
throws ReplicationSecurityException, IOException {
if (authenticator != null && authenticator.hasNewToken()) {
Protocol.ReAuthenticate response =
protocol.new ReAuthenticate(authenticator.getToken());
protocol.write(response, replicaFeederChannel);
}
}
|
java
| 11
| 0.659459
| 70
| 40.222222
| 9
|
/**
* Implements the reauthentication response for output thread. It sends
* token to server which would conduct security check for the subscriber
* with the new token.
*
 * @throws ReplicationSecurityException if it fails to obtain a new login
 * token by renewal or reauthentication;
 * @throws IOException if it fails to write the reauth message to the channel.
*/
|
function
|
def _custom_stopwords_list(language, force=True):
if force or (language not in language2stopwords):
path_to_file = os.path.join(get_base_dir(), 'databasic', 'logic', 'stopwords', language)
try:
f = open(path_to_file, 'r')
custom_stopwords = [w.strip() for w in f.readlines() if len(w.strip()) > 0]
logger.debug("Loaded {} custom {} stopwords".format(len(custom_stopwords), language))
f.close()
except OSError:
custom_stopwords = []
language2stopwords[language] = custom_stopwords
return language2stopwords[language]
|
python
| 16
| 0.61626
| 97
| 50.333333
| 12
|
We have some extra stopwords that we want to use in some of our languages
:param language: NLTK-compatible name of language
:return: a list of stopwords we added for that language, [] if none to add
|
function
|
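A hedged usage sketch of the caching behaviour above: the first call reads the stopword file from disk, and a later call with force=False is served from the language2stopwords cache (the language name is a placeholder):

stopwords = _custom_stopwords_list("english")            # reads the file from disk
cached = _custom_stopwords_list("english", force=False)  # served from the cache
assert stopwords == cached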
public class CryptoIndexWorldPluginRoot implements MarketPriceInterface, Service, CryptoIndexManager, DealsWithErrors, DealsWithPluginFileSystem, DealsWithPluginDatabaseSystem, Plugin {
MarketPrice marketPrice = new MarketPrice();
private CryptoIndexDao cryptoIndexDao;
/**
* Service Interface member variables.
*/
ServiceStatus serviceStatus = ServiceStatus.CREATED;
/**
* DealsWithPlatformDatabaseSystem Interface member variables.
*/
PluginDatabaseSystem pluginDatabaseSystem;
/**
* DealWithEvents Interface member variables.
*/
/**
* DealsWithLogger interface member variable
*/
private LogManager logManager;
private static Map<String, LogLevel> newLoggingLevel = new HashMap<String, LogLevel>();
/**
* DealsWithErrors Interface member variables.
*/
private ErrorManager errorManager;
/**
* Plugin Interface member variables.
*/
UUID pluginId;
@Override
public void start() throws CantStartPluginException {
try {
this.cryptoIndexDao = new CryptoIndexDao(pluginDatabaseSystem, this.pluginId);
cryptoIndexDao.initializeDatabase();
} catch (CantInitializeCryptoIndexDatabaseException e) {
e.printStackTrace();
}
this.serviceStatus = ServiceStatus.STARTED;
}
@Override
public void pause() {
this.serviceStatus = ServiceStatus.PAUSED;
}
@Override
public void resume() {
this.serviceStatus = ServiceStatus.STARTED;
}
@Override
public void stop() {
this.serviceStatus = ServiceStatus.STOPPED;
}
@Override
public ServiceStatus getStatus() {
return this.serviceStatus;
}
/**
* CryptoIndex Interface implementation.
*/
@Override
public double getMarketPrice(FiatCurrency fiatCurrency, CryptoCurrency cryptoCurrency, long time) throws FiatCurrencyNotSupportedException, CryptoCurrencyNotSupportedException {
double price = 0;
try {
/**
* implement the interface to get the last price of market from different providers
*/
price = marketPrice.getMarketPrice(fiatCurrency, cryptoCurrency, 0);
/**
* save in database the last price consulted
*/
String c = cryptoCurrency.getCode();
String f = fiatCurrency.getCode();
cryptoIndexDao.saveLastRateExchange(c, f, price);
} catch (CantSaveLastRateExchangeException cantSaveLastRateExchangeException) {
// TODO manage exceptions
// ok TODO add exception CantGetMarketPriceException
// ok TODO use errorManager to report unexpected exceptions
// ok TODO use generic exceptions for other unexpected exceptions
new CantGetMarketPriceException(CantGetMarketPriceException.DEFAULT_MESSAGE,cantSaveLastRateExchangeException,"CryptoIndex WorldPluginRoot GetMarketPrice","Cant Save Last Rate Exchange Exception");
}catch(Exception exception){
this.errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_CRYPTO_INDEX, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN,exception);
}
return price;
}
/**
* DealWithErrors Interface implementation.
*/
@Override
public void setErrorManager(ErrorManager errorManager) {this.errorManager = errorManager;}
/**
* DealsWithPluginIdentity methods implementation.
*/
@Override
public void setId(UUID pluginId) {
this.pluginId = pluginId;
}
@Override
public void setPluginFileSystem(PluginFileSystem pluginFileSystem) {
}
@Override
public void setPluginDatabaseSystem(PluginDatabaseSystem pluginDatabaseSystem) {
this.pluginDatabaseSystem = pluginDatabaseSystem;
}
/**
* implement the interface MarketPriceInterface
*
* @param cryptoCurrency
* @param fiatCurrency
* @param time
* @return
*/
@Override
public double getHistoricalExchangeRate(CryptoCurrency cryptoCurrency, FiatCurrency fiatCurrency, long time) {
double marketExchangeRate = 0;
try {
marketExchangeRate=marketPrice.getHistoricalExchangeRate(cryptoCurrency,fiatCurrency,time);
} catch (CantGetHistoricalExchangeRateException cantGetHistoricalExchangeRateException) {
new CantGetHistoricalExchangeRateException(CantGetHistoricalExchangeRateException.DEFAULT_MESSAGE,cantGetHistoricalExchangeRateException,"CryptoIndex WorldPluginRoot GetMarketPrice","Cant Get Historical Exchange Rate ");
} catch (HistoricalExchangeRateNotFoundException e) {
new HistoricalExchangeRateNotFoundException(HistoricalExchangeRateNotFoundException.DEFAULT_MESSAGE,e,"CryptoIndex WorldPluginRoot GetMarketPrice","Historical Exchange Rate Not Found Exception");
}catch (Exception exception){
this.errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_CRYPTO_INDEX, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN,exception);
}
return marketExchangeRate;
}
//@Override
public double getHistoricalExchangeRateFromDatabase(CryptoCurrency cryptoCurrency, FiatCurrency fiatCurrency, long time) throws FiatCurrencyNotSupportedException, CryptoCurrencyNotSupportedException, CantGetHistoricalExchangeRateException, HistoricalExchangeRateNotFoundException {
/**
* get market price from database, filtering by time
*/
double marketExchangeRate = 0;
try {
marketExchangeRate =marketPrice.getHistoricalExchangeRateFromDatabase(cryptoCurrency, fiatCurrency, time);
return marketExchangeRate;
} catch (CantGetHistoricalExchangeRateException cantGetHistoricalExchangeRateException) {
new CantGetHistoricalExchangeRateException(CantGetHistoricalExchangeRateException.DEFAULT_MESSAGE,cantGetHistoricalExchangeRateException,"CryptoIndex WorldPluginRoot GetMarketPrice","Cant Get Historical Exchange Rate ");
}catch (HistoricalExchangeRateNotFoundException historicalExchangeRateNotFoundException){
new HistoricalExchangeRateNotFoundException(HistoricalExchangeRateNotFoundException.DEFAULT_MESSAGE,null,"CryptoIndex WorldPluginRoot GetMarketPrice","Historical Exchange Rate Not Found Exception");
}
catch (Exception exception){
this.errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_CRYPTO_INDEX, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN,exception);
}
// TODO manage exceptions
// ok TODO add exception CantGetHistoricalExchangeRateException
// TODO maybe there's no record for the currencies pair: HistoricalExchangeRateNotFoundException
// ok TODO use errorManager to report unexpected exceptions
// ok TODO use generic exceptions for other unexpected exceptions
return marketExchangeRate;
}
}
|
java
| 12
| 0.72318
| 285
| 39.754286
| 175
|
/**
 * This plugin's mission is to provide the current market price of the different crypto currencies. To accomplish that
 * goal, it will check one or more indexes as needed.
 */
|
class
|
func ErrorBox(err string, vars ...interface{}) template.HTML {
	body := `<div class="kepler panel warning icon">` +
		`<h3 class="headline">` + html.EscapeString(err) + `</h3>`
	if len(vars) >= 1 {
		body += "<code>" + html.EscapeString(fmt.Sprint(vars[0])) + "</code>"
	}
	body += "</div>" // close the panel whether or not a detail code was given
	return template.HTML(body)
}
|
go
| 14
| 0.626623
| 77
| 37.625
| 8
|
// ErrorBox returns a styled div error box.
// One optional argument can be passed to show a detailed error code.
// This error code is only shown to the user if the session is authenticated
// as developer.
|
function
|
public final void trapException(final Exception exception)
throws NullPointerException {
Checks.checkNotNull(exception);
if (LOG.isDebugEnabled())
LOG.debug("Exception trapped.", exception);
try {
boolean success = getExceptionQueue().add(exception);
assert success;
} catch (IllegalStateException ex) {
throw new AssertionError(ex);
}
}
|
java
| 10
| 0.605442
| 65
| 35.833333
| 12
|
/**
* Called by the encapsulating class when an exception is caught.
*
* @param exception caught exception
* @throws NullPointerException if exception is null
*/
|
function
|
def evaluate_results(result_folder_path, groundtruth_folder_path, type='emerging', eval_result_folder_path=None):
groundtruth = load_gt(os.path.join(groundtruth_folder_path, type))
dict_event_words = dict()
for root, dirs, files in os.walk(result_folder_path):
for dir in dirs:
event_words = load_events(os.path.join(result_folder_path, dir, type + '.txt'))
if event_words is not None:
dict_event_words[dir] = event_words
eval_measures, dict_time_frame_measures = get_eval_measures(dict_event_words, groundtruth,
exact_match=True, coverage_n=1,
one_cluster_m_events=False,
eval_result_folder_path=eval_result_folder_path)
logger.info(f"time_window:[event_n,event_tp,cluster_n,cluster_tp,keyword_n,keyword_tp]")
logger.info(dict_time_frame_measures)
dict_results = dict()
micro_keyword_recall = calculate_recall(eval_measures['keyword_tp'], eval_measures['keyword_n'])
dict_results['keyword recall'] = micro_keyword_recall
logger.info(f'Micro Keyword Recall: {micro_keyword_recall}')
event_recall = calculate_recall(eval_measures['event_tp'], eval_measures['event_n'])
event_precision = calculate_recall(eval_measures['cluster_tp'], eval_measures['cluster_n'])
dict_results['event recall'] = event_recall
dict_results['event relevance'] = event_precision
logger.info(f'Event Recall: {event_recall}')
logger.info(f'event relevance: {event_precision}')
timeframe_recall = calculate_recall(eval_measures['timeframe_tp'], eval_measures['timeframe_n'])
timeframe_precision = calculate_precision(eval_measures['timeframe_tp'], eval_measures['timeframe_fp'])
timeframe_f1 = calculate_f1(timeframe_recall, timeframe_precision)
dict_results['timeframe recall'] = timeframe_recall
dict_results['timeframe precision'] = timeframe_precision
dict_results['timeframe f1'] = timeframe_f1
logger.info(f'Timeframe Recall: {timeframe_recall}')
logger.info(f'Timeframe Precision: {timeframe_precision}')
logger.info(f'Timeframe F1: {timeframe_f1}')
return dict_results
|
python
| 15
| 0.654887
| 113
| 66.441176
| 34
|
Method to compute evaluation measures by comparing results with ground truth (GT).
Files in results folder and GT folder need to be named by time windows formatted similarly.
Keyword Recall (micro-averaged), Event Recall, Event Relevance (precision), Timeframe Recall, Timeframe Precision and
Timeframe F1 will be calculated and logged.
Parameters
----------
:param result_folder_path: str
Path to folder which contains extracted events
In this folder there should be separate folders for each time window. Within each time window, there should be
separate .txt files for each event type (e.g. emerging.txt).
Event words need to be written per line of corresponding .txt files (Words are separated by ', ').
:param groundtruth_folder_path: str
Path to GT data folder.
:param type: str, optional
Type of the events need to be evaluated ('emerging')
:param eval_result_folder_path: str, optional
Folder path to save matched event words in each time window.
:return:
|
function
|
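A hypothetical call showing the folder layout the docstring requires; all directory names here are placeholders:

# results/<time_window>/emerging.txt  vs  gt_folder/emerging/<time_window>...
metrics = evaluate_results(
    result_folder_path="results",
    groundtruth_folder_path="gt_folder",
    type="emerging",
    eval_result_folder_path="eval_out",
)
print(metrics["event recall"], metrics["timeframe f1"])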
public static abstract class StateCallback {
/**
* An error code that can be reported by {@link #onError}
* indicating that the camera device is in use already.
*
* <p>
* This error can be produced when opening the camera fails due to the camera
* being used by a higher-priority camera API client.
* </p>
*
* @see #onError
*/
public static final int ERROR_CAMERA_IN_USE = 1;
/**
* An error code that can be reported by {@link #onError}
* indicating that the camera device could not be opened
* because there are too many other open camera devices.
*
* <p>
* The system-wide limit for number of open cameras has been reached,
* and more camera devices cannot be opened until previous instances are
* closed.
* </p>
*
* <p>
* This error can be produced when opening the camera fails.
* </p>
*
* @see #onError
*/
public static final int ERROR_MAX_CAMERAS_IN_USE = 2;
/**
* An error code that can be reported by {@link #onError}
* indicating that the camera device could not be opened due to a device
* policy.
*
* @see android.app.admin.DevicePolicyManager#setCameraDisabled(android.content.ComponentName, boolean)
* @see #onError
*/
public static final int ERROR_CAMERA_DISABLED = 3;
/**
* An error code that can be reported by {@link #onError}
* indicating that the camera device has encountered a fatal error.
*
* <p>The camera device needs to be re-opened to be used again.</p>
*
* @see #onError
*/
public static final int ERROR_CAMERA_DEVICE = 4;
/**
* An error code that can be reported by {@link #onError}
* indicating that the camera service has encountered a fatal error.
*
* <p>The Android device may need to be shut down and restarted to restore
* camera function, or there may be a persistent hardware problem.</p>
*
* <p>An attempt at recovery <i>may</i> be possible by closing the
* CameraDevice and the CameraManager, and trying to acquire all resources
* again from scratch.</p>
*
* @see #onError
*/
public static final int ERROR_CAMERA_SERVICE = 5;
/** @hide */
@Retention(RetentionPolicy.SOURCE)
@IntDef(prefix = {"ERROR_"}, value =
{ERROR_CAMERA_IN_USE,
ERROR_MAX_CAMERAS_IN_USE,
ERROR_CAMERA_DISABLED,
ERROR_CAMERA_DEVICE,
ERROR_CAMERA_SERVICE })
public @interface ErrorCode {};
/**
* The method called when a camera device has finished opening.
*
* <p>At this point, the camera device is ready to use, and
* {@link CameraDevice#createCaptureSession} can be called to set up the first capture
* session.</p>
*
* @param camera the camera device that has become opened
*/
public abstract void onOpened(@NonNull CameraDevice camera); // Must implement
/**
* The method called when a camera device has been closed with
* {@link CameraDevice#close}.
*
* <p>Any attempt to call methods on this CameraDevice in the
* future will throw a {@link IllegalStateException}.</p>
*
* <p>The default implementation of this method does nothing.</p>
*
* @param camera the camera device that has become closed
*/
public void onClosed(@NonNull CameraDevice camera) {
// Default empty implementation
}
/**
* The method called when a camera device is no longer available for
* use.
*
* <p>This callback may be called instead of {@link #onOpened}
* if opening the camera fails.</p>
*
* <p>Any attempt to call methods on this CameraDevice will throw a
* {@link CameraAccessException}. The disconnection could be due to a
* change in security policy or permissions; the physical disconnection
* of a removable camera device; or the camera being needed for a
* higher-priority camera API client.</p>
*
* <p>There may still be capture callbacks that are invoked
* after this method is called, or new image buffers that are delivered
* to active outputs.</p>
*
* <p>The default implementation logs a notice to the system log
* about the disconnection.</p>
*
* <p>You should clean up the camera with {@link CameraDevice#close} after
* this happens, as it is not recoverable until the camera can be opened
* again. For most use cases, this will be when the camera again becomes
* {@link CameraManager.AvailabilityCallback#onCameraAvailable available}.
* </p>
*
* @param camera the device that has been disconnected
*/
public abstract void onDisconnected(@NonNull CameraDevice camera); // Must implement
/**
* The method called when a camera device has encountered a serious error.
*
* <p>This callback may be called instead of {@link #onOpened}
* if opening the camera fails.</p>
*
* <p>This indicates a failure of the camera device or camera service in
* some way. Any attempt to call methods on this CameraDevice in the
* future will throw a {@link CameraAccessException} with the
* {@link CameraAccessException#CAMERA_ERROR CAMERA_ERROR} reason.
* </p>
*
* <p>There may still be capture completion or camera stream callbacks
* that will be called after this error is received.</p>
*
* <p>You should clean up the camera with {@link CameraDevice#close} after
* this happens. Further attempts at recovery are error-code specific.</p>
*
* @param camera The device reporting the error
* @param error The error code.
*
* @see #ERROR_CAMERA_IN_USE
* @see #ERROR_MAX_CAMERAS_IN_USE
* @see #ERROR_CAMERA_DISABLED
* @see #ERROR_CAMERA_DEVICE
* @see #ERROR_CAMERA_SERVICE
*/
public abstract void onError(@NonNull CameraDevice camera,
@ErrorCode int error); // Must implement
}
|
java
| 10
| 0.587068
| 111
| 39.554878
| 164
|
/**
* A callback objects for receiving updates about the state of a camera device.
*
* <p>A callback instance must be provided to the {@link CameraManager#openCamera} method to
* open a camera device.</p>
*
* <p>These state updates include notifications about the device completing startup (
* allowing for {@link #createCaptureSession} to be called), about device
* disconnection or closure, and about unexpected device errors.</p>
*
* <p>Events about the progress of specific {@link CaptureRequest CaptureRequests} are provided
* through a {@link CameraCaptureSession.CaptureCallback} given to the
* {@link CameraCaptureSession#capture}, {@link CameraCaptureSession#captureBurst},
* {@link CameraCaptureSession#setRepeatingRequest}, or
* {@link CameraCaptureSession#setRepeatingBurst} methods.
*
* @see CameraManager#openCamera
*/
|
class
|
def make_intervals(b3, b4):
b3_intervals = sorted(block2intervals(b3), key=lambda t: t[0])
b4_intervals = sorted(block2intervals(b4), key=lambda t: t[0])
min3 = b3_intervals[0][0]
min4 = b4_intervals[0][0]
bfirst = b3_intervals
bsecond = b4_intervals
if min4 < min3:
bfirst, bsecond = b4_intervals, b3_intervals
intervals = []
for interval in bfirst:
start = interval[0]
stop = interval[1]
for int2 in bsecond:
start2 = int2[0]
stop2 = int2[1]
if start2 <= start and stop2 >= stop:
intervals.append(interval)
            elif start2 > stop:
pass
elif start2 >= start:
if start2 == start and stop2 == stop:
pass
elif stop2 < stop:
intervals.append((start, start2))
intervals.append((start2, stop2))
start = stop2
elif stop2 >= stop:
intervals.append((start, start2))
intervals.append((start2, stop))
start = stop
stop = stop2
else:
raise RuntimeError()
return intervals
|
python
| 17
| 0.586957
| 64
| 29.257143
| 35
|
Given the already parsed igs block 3 and 4 dictionaries (aka
'3. GNSS Receiver Information' and '4. GNSS Antenna Information') as
parsed from an IgsLogFile instance (see IgsLogFile::parse_block),
concatenate the intervals based on changes either on the 3 or 4 block.
|
function
|
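A self-contained illustration of the splitting rule make_intervals implements, using plain (start, stop) tuples instead of parsed log blocks; split_on_changes is a simplified stand-in written for this sketch, not part of the original module:

def split_on_changes(a, b):
    """Cut each interval in `a` at every boundary that appears in `b`."""
    cuts = sorted({p for s, e in b for p in (s, e)})
    out = []
    for start, stop in a:
        points = [start] + [c for c in cuts if start < c < stop] + [stop]
        out.extend(zip(points, points[1:]))
    return out

# Receiver unchanged over [0, 10); antenna swapped at t=4:
print(split_on_changes([(0, 10)], [(0, 4), (4, 10)]))  # [(0, 4), (4, 10)]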
static tree
omp_clause_aligned_alignment (tree clause)
{
if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
unsigned int al = 1;
opt_scalar_mode mode_iter;
auto_vector_sizes sizes;
targetm.vectorize.autovectorize_vector_sizes (&sizes);
poly_uint64 vs = 0;
for (unsigned int i = 0; i < sizes.length (); ++i)
vs = ordered_max (vs, sizes[i]);
static enum mode_class classes[]
= { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
for (int i = 0; i < 4; i += 2)
FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
{
scalar_mode mode = mode_iter.require ();
machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
if (GET_MODE_CLASS (vmode) != classes[i + 1])
continue;
while (maybe_ne (vs, 0U)
&& known_lt (GET_MODE_SIZE (vmode), vs)
&& GET_MODE_2XWIDER_MODE (vmode).exists ())
vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
tree type = lang_hooks.types.type_for_mode (mode, 1);
if (type == NULL_TREE || TYPE_MODE (type) != mode)
continue;
poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
GET_MODE_SIZE (mode));
type = build_vector_type (type, nelts);
if (TYPE_MODE (type) != vmode)
continue;
if (TYPE_ALIGN_UNIT (type) > al)
al = TYPE_ALIGN_UNIT (type);
}
return build_int_cst (integer_type_node, al);
}
|
c
| 12
| 0.642438
| 67
| 34.868421
| 38
|
/* Return alignment to be assumed for var in CLAUSE, which should be
OMP_CLAUSE_ALIGNED. */
|
function
|
func (vm *ChaincodeVM) skipToMatchingBracket(wasIf bool) error {
	nesting := 0
	// https://github.com/ndau/chaincode/pull/81/commits/122aa3b5009590bc488d204289b47800954f316b
	// refactored the sequence by which the VM is updated during an evaluation.
	// One consequence of this refactor is that the PC is not incremented until after
	// the instruction is fully evaluated. Fully evaluating the instruction
	// includes calls to this function.
	// It proved simpler to temporarily adjust the PC for the duration of this function
	// than to rewrite it with different assumptions about the current state of the PC.
	vm.pc++
	// undo the increment on the way out
	defer func() {
		vm.pc--
	}()
	for {
		instr := vm.code[vm.pc]
		extra := extraBytes(vm.code, vm.pc)
		vm.pc += extra + 1
		switch instr {
		case OpIfNZ, OpIfZ:
			nesting++
		case OpElse:
			if nesting == 0 && wasIf {
				// we're at the right level, so we're done
				return nil
			}
		case OpEndIf:
			if nesting > 0 {
				nesting--
			} else {
				// we're at the right level so we're done
				return nil
			}
		default:
			if vm.pc >= len(vm.code) {
				// fail-safe (should never happen)
				panic("VM RAN OFF THE END!")
			}
		}
	}
}
|
go
| 14
| 0.690739
| 90
| 27.731707
| 41
|
// This is only run on VMs that have been validated. It is called when we hit an
// IF that fails (in which case it skips to the instruction after the ELSE if it
// exists, or the ENDIF if it doesn't) or we hit an ELSE in execution, which
// means we should skip to instruction after the corresponding ENDIF.
|
function
|
def guess_carrier(
self,
tracking_number: str
) -> Optional[CarrierEnum]:
for carrier_enum, carrier in self.carrier_registrar.carriers.items():
if re.match(carrier.client.TRACKING_REGEX, tracking_number):
return carrier_enum
return None
|
python
| 10
| 0.607143
| 77
| 37.625
| 8
|
Guess which carrier a tracking number belongs to
Parameters
----------
tracking_number
The tracking number to guess a carrier for.
Returns
-------
Optional[CarrierEnum]
The carrier the tracking number belongs to.
|
function
|
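A self-contained sketch of the first-match-wins lookup above, with made-up tracking patterns; in the real code each registered carrier client carries its own TRACKING_REGEX:

import re

# Hypothetical patterns; not the carriers' real regexes.
TRACKING_REGEXES = {
    "ups": r"1Z[0-9A-Z]{16}",
    "usps": r"9[0-9]{21}",
}

def guess(tracking_number):
    for name, pattern in TRACKING_REGEXES.items():
        if re.match(pattern, tracking_number):
            return name
    return None

print(guess("1Z999AA10123456784"))  # -> "ups"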
def molecule(g, pos = (0, 0), rot = 30, turn = -1):
l = 80
angle = 60
params = (l, angle)
bb = structure.backbone(g)
t = turtle.Turtle()
t.speed(3)
pen_size = 1
t.pensize(pen_size)
t.up()
t.goto(pos)
t.seth(rot)
t.down()
if list(g.nodes())[0] == 0:
element = g.nodes.data("element")[0]
draw_element(t, element)
if g.degree(0) == 2:
draw_branches(t, g, 0, turn, *params)
for e in bb.edges():
try:
cyclic = g[e[0]][e[1]]["cyclic"]
except KeyError:
cyclic = False
order = g[e[0]][e[1]]["order"]
t.pensize(pen_size + 3 * order)
t.forward(l)
t.ht()
t.pensize(pen_size)
t.left(angle * turn)
if not cyclic:
turn *= -1
n = max(e)
element = g.nodes.data("element")[n]
draw_element(t, element)
if g.degree(n) == 3:
draw_branches(t, g, n, turn, *params, cyclic = cyclic)
elif g.degree(n) > 3:
raise ValueError("Cannot deal with these yet")
|
python
| 13
| 0.481313
| 66
| 27.894737
| 38
|
Parameters:
g, a graph representing a molecule
pos, a tuple representing turtle coordinates
rot, a number representing the current turtle heading (int or float)
turn, 1 or -1, alternates to create linear pieces of molecule
Function:
Main function for drawing the molecules.
Draws linear segments until it finds a branch point,
then calls draw_branches() to extract the correct branched graph,
after which draw_branches() calls draw.molecule again.
No return value, but it will draw new branches (via draw_branches)
at any branch points it finds.
|
function
|
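A hypothetical call for the drawer above: it builds an ethane-like graph whose attribute names ('element', 'order') match the accesses in molecule(); the module's structure.backbone and draw_element helpers are assumed, so this is a sketch rather than a runnable demo:

import networkx as nx

g = nx.Graph()
g.add_node(0, element="C")
g.add_node(1, element="C")
g.add_edge(0, 1, order=1)   # single C-C bond

molecule(g, pos=(0, 0), rot=30)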
public class EditEventFragment extends DialogFragment implements LocationListener {
private Event event;
private EditText eventName;
private EditText eventComment;
private EditText eventDate;
private EditText eventLocation;
private OnFragmentInteractionalListener listener;
private DatePicker datePicker;
private Button confirmDateButton;
Button openMap;
double currentLatitude = 0;
double currentLongitude = 0;
double latFromMap;
double longFromMap;
private int day;
private int month;
private int year;
public interface OnFragmentInteractionalListener{
void onOkPressed(Event event);
}
/**
* This is used to transfer data of the object Event from ViewEventActivity
* @param event
* @return fragment
* @author Shanshan Wei/swei3
*/
public static EditEventFragment newInstance(Event event){
Bundle args = new Bundle();
args.putSerializable("Event", event);
EditEventFragment fragment = new EditEventFragment();
fragment.setArguments(args);
return fragment;
}
/**
* The start of fragments' lifecycle
* It attaches the fragment
* @param context
* @author Shanshan Wei/swei3
*/
@Override
public void onAttach(Context context){
super.onAttach(context);
if(context instanceof OnFragmentInteractionalListener){
listener = (OnFragmentInteractionalListener) context;
}else{
            throw new RuntimeException(context.toString()
                    + " must implement OnFragmentInteractionalListener");
}
}
/**
* This builds a dialog fragment to show details of the event and allows them to edit the details.
* @param saveInstanceState
* @return Dialog
* @author Shanshan Wei/swei3
*/
@NonNull
@Override
public Dialog onCreateDialog(@Nullable Bundle saveInstanceState){
View view = LayoutInflater.from(getActivity()).inflate(R.layout.fragment_edit_event, null);
eventName = view.findViewById(R.id.editText_event_name);
eventComment = view.findViewById(R.id.editText_event_comment);
eventDate = view.findViewById(R.id.editText_event_date);
eventLocation = view.findViewById(R.id.editText_event_location);
datePicker = view.findViewById(R.id.event_date_picker);
confirmDateButton = view.findViewById(R.id.confirm_eventDate_button);
openMap = view.findViewById(R.id.editEvent_open_map);
AlertDialog.Builder builder = new AlertDialog.Builder(getContext());
event = (Event) getArguments().getSerializable("Event");
eventName.setText(event.getEventName());
eventComment.setText(event.getDescription());
eventDate.setText(event.getEventDate());
eventLocation.setText(event.getEventLocation());
// Create Date picker
createDatePicker();
// Check location permission
checkLocationPermission();
// Open map
openMap.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(getActivity(), Map.class);
intent.putExtra("currentLat", currentLatitude);
intent.putExtra("currentLong", currentLongitude);
startActivityForResult(intent, 101);
}
});
return builder
.setView(view)
.setTitle("Edit Habit Event")
.setNegativeButton("Cancel", null)
.setPositiveButton("Confirm", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
String inputName = eventName.getText().toString();
String inputComment = eventComment.getText().toString();
String inputDate = eventDate.getText().toString();
String inputLocation = eventLocation.getText().toString();
Log.d("EditEvent fragment", inputName+inputComment);
event.setEventName(inputName);
event.setDescription(inputComment);
event.setEventDate(inputDate);
event.setEventLocation(inputLocation);
listener.onOkPressed(new Event(inputName, inputComment, inputDate, inputLocation));
}
}).create();
}
/**
* Get result back from Map activity
* @param requestCode
* @param resultCode
* @param data
* @return void
* @author yyang13
*/
@Override
public void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == 101) {
latFromMap = data.getDoubleExtra("currentLat", 0);
longFromMap = data.getDoubleExtra("currentLong", 0);
if (latFromMap != 0 || longFromMap != 0){
try {
Geocoder geocoder = new Geocoder(getContext());
List<Address> addresses = geocoder.getFromLocation(latFromMap, longFromMap, 1);
String address = addresses.get(0).getAddressLine(0);
eventLocation.setText(address);
                }catch (Exception e) {
                    Log.e("EditEventFragment", "Failed to reverse-geocode the selected location", e);
                }
}
}
}
/**
* create DatePicker
* @return void
* @author yyang13
*
*/
public void createDatePicker() {
// Create Date picker
confirmDateButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
day = datePicker.getDayOfMonth();
month = (datePicker.getMonth() + 1);
year = datePicker.getYear();
// Format entered date
eventDate.setText(String.format("%d-%02d-%d", year, month, day));
}
});
}
/**
* Check Location Permission
* @return void
* @author yyang13
*/
private void checkLocationPermission() {
if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.ACCESS_FINE_LOCATION)
!= PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(getActivity(), new String[]{
Manifest.permission.ACCESS_FINE_LOCATION
}, 100);
}
}
/**
* Get Address From Location
* @param location
* @return void
* @author yyang13
*/
@Override
public void onLocationChanged(@NonNull Location location) {
try {
Geocoder geocoder = new Geocoder(getActivity(), Locale.getDefault());
currentLatitude = location.getLatitude();
currentLongitude = location.getLongitude();
List<Address> addresses = geocoder.getFromLocation(location.getLatitude(), location.getLongitude(), 1);
String address = addresses.get(0).getAddressLine(0);
eventLocation.setText(address);
}catch (Exception e) {
e.printStackTrace();
}
}
}
|
java
| 18
| 0.599921
| 115
| 32.526549
| 226
|
/**
* This fragment shows the event details with its name and comment
* The user can also edit the event by inputting a new name and a new comment
* @author Shanshan Wei/swei3
* @see ViewEventActivity
* @see Event
*/
|
class
|
public static void toXml(InputStream JSONStream, OutputStream XMLStream, boolean verbose)
throws IOException
{
if (logger.isLoggable(Level.FINER)) {
logger.entering(className, "toXml(InputStream, OutputStream)");
}
if (XMLStream == null) {
throw new NullPointerException("XMLStream cannot be null");
} else if (JSONStream == null) {
throw new NullPointerException("JSONStream cannot be null");
} else {
if (logger.isLoggable(Level.FINEST)) {
logger.logp(Level.FINEST, className, "transform", "Parsing the JSON and a DOM builder.");
}
try {
JSONObject jObject = new JSONObject(JSONStream);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbf.newDocumentBuilder();
Document doc = dBuilder.newDocument();
if (logger.isLoggable(Level.FINEST)) {
logger.logp(Level.FINEST, className, "transform", "Parsing the JSON content to XML");
}
convertJSONObject(doc, doc.getDocumentElement(), jObject, "jsonObject");
TransformerFactory tfactory = TransformerFactory.newInstance();
Transformer serializer = null;
if (verbose) {
                    serializer = tfactory.newTransformer(new StreamSource( new StringReader(styleSheet) ));
} else {
serializer = tfactory.newTransformer();
}
Properties oprops = new Properties();
oprops.put(OutputKeys.METHOD, "xml");
oprops.put(OutputKeys.OMIT_XML_DECLARATION, "yes");
oprops.put(OutputKeys.VERSION, "1.0");
oprops.put(OutputKeys.INDENT, "true");
serializer.setOutputProperties(oprops);
serializer.transform(new DOMSource(doc), new StreamResult(XMLStream));
} catch (Exception ex) {
IOException iox = new IOException("Problem during conversion");
iox.initCause(ex);
throw iox;
}
}
if (logger.isLoggable(Level.FINER)) {
logger.exiting(className, "toXml(InputStream, OutputStream)");
}
}
|
java
| 18
| 0.571125
| 108
| 49.12766
| 47
|
/**
 * Method to do the transform from a JSON input stream to an XML stream.
 * Neither input nor output streams are closed. Closure is left up to the caller.
 *
 * @param JSONStream The JSON stream to convert to XML.
 * @param XMLStream The stream to write the XML to. The contents written to this stream are always in UTF-8 format.
 * @param verbose Flag to denote whether or not to render the XML text in verbose (indented easy to read), or compact (not so easy to read, but smaller), format.
 *
 * @throws IOException Thrown if an IO error occurs.
 */
|
function
|
private static void createPaintFrame() throws Exception {
SwingUtilities.invokeAndWait(new Runnable() {
public void run() {
frame = new PaintFrame();
frame.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
frame.addWindowListener(new WindowAdapter() {
public void windowClosing(WindowEvent evt) {
try {
fwk.stop();
fwk.waitForStop(0);
} catch (Exception ex) {
System.err.println("Issue stopping framework: " + ex);
}
System.exit(0);
}
});
frame.setVisible(true);
shapeTracker = new ShapeTracker(fwk.getBundleContext(), frame);
shapeTracker.open();
}
});
}
|
java
| 25
| 0.563588
| 71
| 33
| 22
|
/**
* Creates and displays the paint program's paint frame. The embedded
* framework instance and the paint frame are wired together using a
* ShapeTracker instance and the system bundle's BundleContext.
* @throws Exception if any errors occur.
*/
|
function
|
void
index_set_state_flags(Oid indexId, IndexStateFlagsAction action)
{
Relation pg_index;
HeapTuple indexTuple;
Form_pg_index indexForm;
Assert(GetTopTransactionIdIfAny() == InvalidTransactionId);
pg_index = heap_open(IndexRelationId, RowExclusiveLock);
indexTuple = SearchSysCacheCopy(INDEXRELID,
ObjectIdGetDatum(indexId),
0, 0, 0);
if (!HeapTupleIsValid(indexTuple))
elog(ERROR, "cache lookup failed for index %u", indexId);
indexForm = (Form_pg_index) GETSTRUCT(indexTuple);
switch (action)
{
case INDEX_CREATE_SET_READY:
Assert(!indexForm->indisready);
Assert(!indexForm->indisvalid);
indexForm->indisready = true;
break;
case INDEX_CREATE_SET_VALID:
Assert(indexForm->indisready);
Assert(!indexForm->indisvalid);
indexForm->indisvalid = true;
break;
}
heap_inplace_update(pg_index, indexTuple);
heap_close(pg_index, RowExclusiveLock);
}
|
c
| 11
| 0.732301
| 64
| 29.166667
| 30
|
/*
* index_set_state_flags - adjust pg_index state flags
*
* This is used during CREATE INDEX CONCURRENTLY to adjust the pg_index
* flags that denote the index's state. We must use an in-place update of
* the pg_index tuple, because we do not have exclusive lock on the parent
* table and so other sessions might concurrently be doing SnapshotNow scans
* of pg_index to identify the table's indexes. A transactional update would
* risk somebody not seeing the index at all. Because the update is not
* transactional and will not roll back on error, this must only be used as
* the last step in a transaction that has not made any transactional catalog
* updates!
*
* Note that heap_inplace_update does send a cache inval message for the
* tuple, so other sessions will hear about the update as soon as we commit.
*/
|
function
|
public static Type GetResourceType(string resourceType)
{
if(string.IsNullOrEmpty(resourceType))
{
throw new ArgumentNullException("resourceType", Properties.Resources.EXCEPTION_ARGUMENTINVALID);
}
return CoreHelper.GetSystemResourceType(resourceType);
}
|
c#
| 12
| 0.643713
| 112
| 40.875
| 8
|
/// <summary>
/// Returns the resource Type for the given string.
/// </summary>
/// <param name="resourceType">The resource Type string for which the type object is to be returned.</param>
/// <returns>Returns the resource Type object for the given string.</returns>
|
function
|
@Namespace("nvinfer1") @Properties(inherit = org.bytedeco.tensorrt.presets.nvinfer.class)
public class IPluginV2IOExt extends IPluginV2Ext {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public IPluginV2IOExt(Pointer p) { super(p); }
/**
* \brief Configure the layer.
*
* This function is called by the builder prior to initialize(). It provides an opportunity for the layer to make
* algorithm choices on the basis of I/O PluginTensorDesc and the maximum batch size.
*
* @param in The input tensors attributes that are used for configuration.
* @param nbInput Number of input tensors.
* @param out The output tensors attributes that are used for configuration.
* @param nbOutput Number of output tensors.
* */
//!
//!
//!
//!
//!
//!
//!
//!
//!
//!
//!
//!
public native void configurePlugin(@Const PluginTensorDesc in, int nbInput, @Const PluginTensorDesc out, int nbOutput);
/**
* \brief Return true if plugin supports the format and datatype for the input/output indexed by pos.
*
* For this method inputs are numbered 0..(nbInputs-1) and outputs are numbered nbInputs..(nbInputs+nbOutputs-1).
* Using this numbering, pos is an index into InOut, where 0 <= pos < nbInputs+nbOutputs-1.
*
* TensorRT invokes this method to ask if the input/output indexed by pos supports the format/datatype specified
* by inOut[pos].format and inOut[pos].type. The override should return true if that format/datatype at inOut[pos]
* are supported by the plugin. If support is conditional on other input/output formats/datatypes, the plugin can
* make its result conditional on the formats/datatypes in inOut[0..pos-1], which will be set to values
* that the plugin supports. The override should not inspect inOut[pos+1..nbInputs+nbOutputs-1],
* which will have invalid values. In other words, the decision for pos must be based on inOut[0..pos] only.
*
* Some examples:
*
* * A definition for a plugin that supports only FP16 NCHW:
*
* return inOut.format[pos] == TensorFormat::kLINEAR && inOut.type[pos] == DataType::kHALF;
*
* * A definition for a plugin that supports only FP16 NCHW for its two inputs,
* and FP32 NCHW for its single output:
*
 *     return inOut.format[pos] == TensorFormat::kLINEAR && (inOut.type[pos] == (pos < 2 ? DataType::kHALF : DataType::kFLOAT));
*
* * A definition for a "polymorphic" plugin with two inputs and one output that supports
* any format or type, but the inputs and output must have the same format and type:
*
* return pos == 0 || (inOut.format[pos] == inOut.format[0] && inOut.type[pos] == inOut.type[0]);
*
* Warning: TensorRT will stop asking for formats once it finds kFORMAT_COMBINATION_LIMIT on combinations.
* */
public native @Cast("bool") boolean supportsFormatCombination(int pos, @Const PluginTensorDesc inOut, int nbInputs, int nbOutputs);
}
|
java
| 11
| 0.658514
| 136
| 47.333333
| 66
|
/** \class IPluginV2IOExt
*
* \brief Plugin class for user-implemented layers.
*
* Plugins are a mechanism for applications to implement custom layers. This interface provides additional
* capabilities to the IPluginV2Ext interface by extending different I/O data types and tensor formats.
*
* @see IPluginV2Ext
* */
|
class
|
public void showStoreIconIPH(int iphTimeout, @StringRes int stringId) {
mUserEducationHelper.requestShowIPH(
new IPHCommandBuilder(mStatusView.getContext().getResources(),
FeatureConstants.PAGE_INFO_STORE_INFO_FEATURE, stringId, stringId)
.setAutoDismissTimeout(iphTimeout)
.setAnchorView(mStatusView)
.setDismissOnTouch(true)
.build());
}
|
java
| 15
| 0.581109
| 90
| 53.222222
| 9
|
/**
* Show the IPH for store icon in omnibox.
* @param iphTimeout The timeout after which the IPH bubble should disappear if it was shown.
* @param stringId Resource id of the string displayed. The string will also be used for
* accessibility.
*/
|
function
|
public bool SetSpriteRectangle(int x, int y, int maxX, int maxY)
{
if ((x < 0) || (y < 0) || (maxX < 0) || (maxY < 0))
return false;
this.spriteRectangle = new Rectangle(x, y, maxX, maxY);
return true;
}
|
c#
| 10
| 0.472325
| 67
| 37.857143
| 7
|
/// <summary>
/// Sets the rectangle for the given sprite.
/// </summary>
/// <param name="x">x coordinate of upper-left corner</param>
/// <param name="y">y coordinate of upper-left corner</param>
/// <param name="maxX">x coordinate of lower-right corner</param>
/// <param name="maxY">y coordinate of lower-right corner</param>
/// <returns>true if the rectangle was set; false if any argument was invalid</returns>
|
function
|
def merge_child_blocks(genome, chrom_sizes, child_names, block_files, output):
    from functools import reduce  # reduce is not a builtin on Python 3
def get_smallest_block(cur_blocks):
return reduce(lambda a, i: a if a.first < i.first else i, [b for b in cur_blocks if b is not None])
def output_reference_insertions(cur_chrom, cur_chrom_idx, cur_pos, chrom, pos):
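        # Emit padding blocks (reference sequence left as 'X') for any whole
        # chromosomes or gaps between the current write position and (chrom, pos).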
if chrom != chroms[cur_chrom_idx]:
assert chrom > chroms[cur_chrom_idx]
logger.debug('Finishing off chromosome')
size = chrom_sizes[cur_chrom]
if cur_pos != size:
block = Block([BlockLine(genome, cur_chrom, cur_pos, size, '+', 'X' * (size - cur_pos))])
output.write(str(block))
cur_chrom_idx += 1
cur_chrom = chroms[cur_chrom_idx]
cur_pos = 0
while cur_chrom != chrom:
logger.debug('Outputting block for unaligned chrom %s', cur_chrom)
block = Block([BlockLine(genome, cur_chrom, 0, chrom_sizes[cur_chrom], '+', 'X' * (chrom_sizes[cur_chrom]))])
output.write(str(block))
cur_chrom_idx += 1
cur_chrom = chroms[cur_chrom_idx]
if pos != cur_pos:
logger.debug('Outputting block for reference insertion before next block at %s (cur_pos %s)', pos, cur_pos)
assert pos > cur_pos
block = Block([BlockLine(genome, cur_chrom, cur_pos, pos, '+', 'X' * (pos - cur_pos))])
output.write(str(block))
return cur_chrom, cur_chrom_idx, cur_pos
chroms = sorted(chrom_sizes.keys())
cur_chrom_idx = 0
cur_chrom = chroms[0]
cur_pos = 0
need_next = 'need next block'
block_streams = [merged_dup_stream(f, Block.read_next_from_file) for f in block_files]
cur_blocks = [next(stream, None) for stream in block_streams]
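    # Sweep left to right across the reference: repeatedly take the block with
    # the smallest start, emit any uncovered gap before it, then either output
    # it as-is (no overlap) or split-and-merge the overlapping blocks.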
while True:
if all([b is None for b in cur_blocks]):
break
smallest = get_smallest_block(cur_blocks)
cur_chrom, cur_chrom_idx, cur_pos = output_reference_insertions(cur_chrom, cur_chrom_idx, cur_pos, smallest.first.chrom, smallest.first.start)
if not any(smallest.overlaps(b) for b in cur_blocks if b is not None and b != smallest):
logger.debug('Found non-overlapping block')
output.write(str(smallest))
smallest_idx = cur_blocks.index(smallest)
cur_blocks[smallest_idx] = need_next
cur_pos = smallest.first.end
else:
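            # Overlap: cut every overlapping block at a common split point,
            # merge the left-hand pieces into a single output block, and keep
            # the right-hand remainders for the next iteration.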
blocks_to_merge = []
split_point = find_split_point(cur_blocks)
assert smallest.first.start < split_point <= smallest.first.end
for i, block in enumerate(cur_blocks):
if block is None:
continue
if smallest.first.overlaps(block.first) and block.first.start < split_point < block.first.end:
left_block, right_block = block.split(split_point - block.first.start)
blocks_to_merge.append(left_block)
cur_blocks[i] = right_block
elif smallest.first.overlaps(block.first) and split_point == block.first.end:
blocks_to_merge.append(block)
cur_blocks[i] = need_next
logger.debug('Merging %s overlapping blocks', len(blocks_to_merge))
growing_block = blocks_to_merge[0]
for block in blocks_to_merge[1:]:
growing_block = growing_block.merge(block)
cur_pos = growing_block.first.end
output.write(str(growing_block))
for i, block_or_sentinel in enumerate(cur_blocks):
if block_or_sentinel == need_next:
cur_blocks[i] = next(block_streams[i], None)
output_reference_insertions(cur_chrom, cur_chrom_idx, cur_pos, chroms[-1], chrom_sizes[chroms[-1]])
|
python
| 19
| 0.584055
| 150
| 55.088235
| 68
|
Merge several block files on the same reference into one file.
All input block files must be sorted. This merges the alignment information
from the blocks "vertically" so that there is exactly 1 block per reference
position, adding in a single-degree block if no input blocks covered a
reference position.
The reference sequence is left as 'X', to be filled in at a later time.
|
function
|
def rotate(self, direction):
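        # 0.9962 and 0.0872 are cos(5 deg) and sin(5 deg): each call applies a
        # 5-degree rotation about one axis and tracks the running angle per
        # axis in self.rotcounter.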
if direction == 1:
self.rot = np.matmul(self.rot, [[1.0, 0.0, 0.0], [0.0, 0.9962, -0.0872], [0.0, 0.0872, 0.9962]])
            if self.rotcounter[0] + 5 > 360:
                self.rotcounter[0] = 0
            else:
                self.rotcounter[0] += 5
        elif direction == -1:
            self.rot = np.matmul(self.rot, [[1.0, 0.0, 0.0], [0.0, 0.9962, 0.0872], [0.0, -0.0872, 0.9962]])
            if self.rotcounter[0] - 5 < 0:
                self.rotcounter[0] = 360
            else:
                self.rotcounter[0] -= 5
elif direction == 2 and self.ztoggle:
self.rot = np.matmul(self.rot, [[0.9962, -0.0872, 0.0], [0.0872, 0.9962, 0.0], [0.0, 0.0, 1.0]])
if self.rotcounter[2] + 5 > 360:
self.rotcounter[2] = 0
else:
self.rotcounter[2] += 5
elif direction == -2 and self.ztoggle:
self.rot = np.matmul(self.rot, [[0.9962, 0.0872, 0.0], [-0.0872, 0.9962, 0.0], [0.0, 0.0, 1.0]])
if self.rotcounter[2] - 5 < 0:
self.rotcounter[2] = 360
else:
self.rotcounter[2] -= 5
elif direction == 2:
self.rot = np.matmul(self.rot, [[0.9962, 0.0, 0.0872], [0.0, 1.0, 0.0], [-0.0872, 0.0, 0.9962]])
if self.rotcounter[1] + 5 > 360:
self.rotcounter[1] = 0
else:
self.rotcounter[1] += 5
elif direction == -2:
self.rot = np.matmul(self.rot, [[0.9962, 0.0, -0.0872], [0.0, 1.0, 0.0], [0.0872, 0.0, 0.9962]])
if self.rotcounter[1] - 5 < 0:
self.rotcounter[1] = 360
else:
self.rotcounter[1] -= 5
|
python
| 13
| 0.457916
| 108
| 47.571429
| 35
|
Set an internal rotation matrix that is applied to the coordinates before every render.
:param direction: 1 and -1 rotate about the x axis in opposite directions; 2 and -2 rotate about z if the ztoggle is active, otherwise about y
|
function
|
public abstract class EditorWindowBase : EditorWindow {
protected ScreenStateMachine screenStateMachine;
protected virtual void OnEnable() { screenStateMachine = CreateWindowStateMachine(); }
protected virtual void OnGUI() {
if (screenStateMachine != null) {
screenStateMachine.DoLayoutNavigation();
screenStateMachine.DoLayoutActiveScreen();
}
}
protected virtual void OnDestroy() {
if (screenStateMachine != null)
screenStateMachine.Dispose();
}
protected abstract ScreenStateMachine CreateWindowStateMachine();
public static EditorWindowBase GetWindowInstance(Type windowType) {
if (!typeof(EditorWindowBase).IsAssignableFrom(windowType)) {
            Debug.LogErrorFormat("Unable to get a WindowBase instance from the type '{0}'. Type doesn't inherit from WindowBase", windowType);
return null;
}
EditorWindowBase window = GetWindow(windowType) as EditorWindowBase;
if (window == null) {
Debug.LogErrorFormat("Unable to get a WindowBase instance from the type '{0}'", windowType);
return null;
}
window.Show();
return window;
}
public static TWindow GetWindowInstance<TWindow>() where TWindow : EditorWindowBase { return (TWindow)GetWindowInstance(typeof(TWindow)); }
}
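
// Illustrative subclass (not from the original source): a concrete window only has to
// supply its ScreenStateMachine. The names and constructor arguments below are
// hypothetical assumptions.
//
// public class SettingsWindow : EditorWindowBase {
//     protected override ScreenStateMachine CreateWindowStateMachine() {
//         return new ScreenStateMachine(/* screens for this window */);
//     }
// }
//
// Opened via: EditorWindowBase.GetWindowInstance<SettingsWindow>();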
|
c#
| 13
| 0.629905
| 147
| 50
| 29
|
/// <summary>
/// Provides a base for managers to inherit from and handles the display of their information
/// </summary>
|
class
|