--- source.js ---

import css from './source.css';
__export__ = css;
export default css;

--- taskController.js ---

/**
* Created by Administrator on 2015/2/3.
*/
var Task = require('../models/task') ;
//add task
exports.addTask = function(req,res){
var title = req.body.title,
content = req.body.content,
date = req.body.date,
duration = req.body.duration,
done = req.body.done,
frequency = req.body.frequency ;
Task.addTask(title,content,date,duration,frequency,done,function(task){
res.json({'status':1,'task':task}) ;
},function(object,error){
res.json({'status':0,'message':error}) ;
}) ;
} ;
//update task
exports.updateTask = function(req,res){
var id = req.params.task_id,
title = req.body.title,
content = req.body.content,
date = req.body.date,
duration = req.body.duration,
frequency = req.body.frequency,
done = req.body.done;
Task.updateTask(id,title,content,date,duration,frequency,done,function(task){
res.json({'status':1,'task':task}) ;
},function(object,error){
res.json({'status':0,'message':error}) ;
}) ;
} ;
//get all tasks
exports.getAllTasks = function(req,res){
Task.findAll(function(tasks){
console.log(tasks.length) ;
res.json({'status':1,'tasks':tasks}) ;
},function(error){
res.json({'status':0,'message':error}) ;
}) ;
} ;
//get task by id
exports.getTaskById = function(req,res){
var id = req.params.task_id ;
Task.findById(id,function(task){
res.json({'status':1,'task':task}) ;
},function(error){
res.json({'status':0,'message':error}) ;
}) ;
} ;
//delete task by id
exports.deleteTask = function(req,res){
var id = req.params.task_id ;
Task.delete(id,function(task){
res.json({'status':1,'task':task}) ;
},function(error){
res.json({'status':0,'message':error}) ;
}) ;
} ;
/*
Task.addTask(title,content,date,duration,frequency,
function(task){
var task_id = task.objectId,
startDate = task.date,
duration = task.duration,
frequency = task.frequency,
date;
//add task records
for(var i = 1 ; i <= duration ; i ++){
//when reach the frequency , continue to next day
if(i % (frequency + 1) === 0)
continue ;
//take current date into consideration,so i must reduce 1
date = dateToInt(afterSomeDays(startDate,i-1)) ;
TaskRecord.addTaskRecord(task_id,date,0,null,
            function(object,error){
//if error happened , remove all records related to task
TaskRecord.deleteTaskRecordByTaskId(task_id) ;
res.json({'status':0,'message':error}) ;
}) ;
}
//return new records and task
TaskRecord.findByTaskId(task.objectId,function(records){
res.json({'status':1,'task':task,'records':records}) ;
},function(error){
res.json({'status':0,'message':error}) ;
}) ;
},function(object,error){
res.json({'status':0,'message':error}) ;
}) ;
*/
/*
Task.updateTask(id,title,content,date,duration,frequency,
function(task){
//update task records
if(reset){
//update task records by resetting all done record
//delete the old records by task id
TaskRecord.deleteTaskRecordByTaskId(id,function(){
//add new task records after delete old task records
var task_id = task.objectId,
startDate = task.date,
duration = task.duration,
frequency = task.frequency,
intDate;
for(var i = 1 ; i <= duration ; i ++){
//when reach the frequency , continue to next day
if(i % (frequency + 1) === 0)
continue ;
//take current date into consideration,so i must reduce 1
intDate = dateToInt(afterSomeDays(startDate,i-1)) ;
TaskRecord.addTaskRecord(task_id,intDate,0,null,
function(object,error){
//if error happened , remove all records related to task
TaskRecord.deleteTaskRecordByTaskId(task_id) ;
res.json({'status':0,'message':error}) ;
}) ;
}
//return new records and task
TaskRecord.findByTaskId(task.objectId,function(records){
res.json({'status':1,'task':task,'records':records}) ;
},function(error){
res.json({'status':0,'message':error}) ;
}) ;
},function(error){
res.json({'status':0,'message':error}) ;
}) ;
}else{
//update task records by overriding the old record
var task_id = task.objectId,
startDate = task.date,
duration = task.duration,
frequency = task.frequency,
intDate;
for(var i = 1 ; i <= duration ; i ++){
//when reach the frequency , delete the exist record
if(i % (frequency + 1) === 0){
intDate = dateToInt(afterSomeDays(startDate,i-1)) ;
TaskRecord.findByTaskIdAndDate(task_id,intDate,function(records){
//exist a record,so delete the exist record
if(records.length !== 0)
records[0].destroy() ;
},function(error){
res.json({'status':0,'message':error}) ;
}) ;
}else{
//take current date into consideration,so i must reduce 1
intDate = dateToInt(afterSomeDays(startDate,i-1)) ;
//not exist a record so add new record
TaskRecord.findByTaskIdAndDate(task_id,intDate,function(records){
if(records.length === 0){
TaskRecord.addTaskRecord(task_id,intDate,0) ;
}
},function(error){
res.json({'status':0,'message':error}) ;
}) ;
}
}
//return new records and task
TaskRecord.findByTaskId(task.objectId,function(records){
res.json({'status':1,'task':task,'records':records}) ;
},function(error){
res.json({'status':0,'message':error}) ;
}) ;
}
},function(object,error){
res.json({'status':0,'message':error}) ;
}) ;
*/

--- run_local_database.py ---

#!/usr/bin/env python
"""Command-line tool for starting a local Vitess database for testing.
USAGE:
$ run_local_database --port 12345 \
--topology test_keyspace/-80:test_keyspace_0,test_keyspace/80-:test_keyspace_1 \
--schema_dir /path/to/schema/dir
It will run the tool, logging to stderr. On stdout, a small json structure
can be waited on and then parsed by the caller to figure out how to reach
the vtgate process.
Once done with the test, send an empty line to this process for it to clean-up,
and then just wait for it to exit.
"""
import json
import logging
import optparse
import os
import re
import sys
from vttest import environment
from vttest import local_database
from vttest import mysql_flavor
from vttest import vt_processes
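# Matches --topology entries of the form <keyspace>/<shardrange>:<dbname>.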
shard_exp = re.compile(r'(.+)/(.+):(.+)')
def main(port, topology, schema_dir, vschema, mysql_only):
shards = []
for shard in topology.split(','):
m = shard_exp.match(shard)
if m:
shards.append(
vt_processes.ShardInfo(m.group(1), m.group(2), m.group(3)))
else:
sys.stderr.write('invalid --shard flag format: %s\n' % shard)
sys.exit(1)
environment.base_port = port
with local_database.LocalDatabase(shards, schema_dir, vschema, mysql_only) as local_db:
print json.dumps(local_db.config())
sys.stdout.flush()
try:
raw_input()
except EOFError:
      sys.stderr.write(
          'WARNING: %s: No empty line was received on stdin.'
          ' Instead, stdin was closed and the cluster will be shut down now.'
' Make sure to send the empty line instead to proactively shutdown'
' the local cluster. For example, did you forget the shutdown in'
' your test\'s tearDown()?\n' % os.path.basename(__file__))
if __name__ == '__main__':
parser = optparse.OptionParser()
parser.add_option(
'-p', '--port', type='int',
help='Port to use for vtcombo. If this is 0, a random port '
'will be chosen.')
parser.add_option(
'-t', '--topology',
help='Define which shards exist in the test topology in the'
' form <keyspace>/<shardrange>:<dbname>,... The dbname'
' must be unique among all shards, since they share'
' a MySQL instance in the test environment.')
parser.add_option(
'-s', '--schema_dir',
help='Directory for initial schema files. Within this dir,'
' there should be a subdir for each keyspace. Within'
' each keyspace dir, each file is executed as SQL'
' after the database is created on each shard.'
' If the directory contains a vschema.json file, it'
' will be used as the vschema for the V3 API.')
parser.add_option(
'-e', '--vschema',
help='If this file is specified, it will be used'
' as the vschema for the V3 API.')
parser.add_option(
'-m', '--mysql_only', action='store_true',
help='If this flag is set only mysql is initialized.'
' The rest of the vitess components are not started.'
' Also, the output specifies the mysql unix socket'
' instead of the vtgate port.')
parser.add_option(
'-v', '--verbose', action='store_true',
help='Display extra error messages.')
(options, args) = parser.parse_args()
if options.verbose:
logging.getLogger().setLevel(logging.DEBUG)
# This will set the flavor based on the MYSQL_FLAVOR env var,
# or default to MariaDB.
mysql_flavor.set_mysql_flavor(None)
  main(options.port, options.topology, options.schema_dir, options.vschema, options.mysql_only)
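
The stdin/stdout handshake described in the docstring above implies a small
driver loop on the caller's side. The following is only a sketch (the flag
values and the way the returned config is consumed are assumptions, not taken
from this file):

    import json
    import subprocess

    # Launch the helper; it prints one JSON line on stdout once the
    # database is up and reachable.
    proc = subprocess.Popen(
        ['./run_local_database.py', '--port', '12345',
         '--topology', 'test_keyspace/0:test_keyspace_0'],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE)

    config = json.loads(proc.stdout.readline())  # blocks until ready
    # ... run tests against the endpoints described in `config` ...

    proc.stdin.write('\n')  # an empty line asks the tool to clean up
    proc.stdin.flush()
    proc.wait()             # then just wait for it to exit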

--- package.py ---

# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Dtbuild3(Package):
"""Simple package which acts as a build dependency"""
homepage = "http://www.example.com"
url = "http://www.example.com/dtbuild3-1.0.tar.gz"
    version('1.0', '0123456789abcdef0123456789abcdef')

--- main-sandbox.component.ts ---

import { Component} from '@angular/core'

@Component({
    templateUrl: "./main-sandbox.component.html"
})
export class MainSandBox {
}

--- conversions.rs ---

use std::str::FromStr;
use super::errors::TcpTransportError;
use super::typedefs::TcpTransportResult;
pub fn as_string(bytes: Vec<u8>) -> TcpTransportResult<String> {
match String::from_utf8(bytes) {
Ok(st) => Ok(st),
Err(_) => Err(TcpTransportError::Utf8Error),
}
}
pub fn as_number<N: FromStr>(bytes: Vec<u8>) -> TcpTransportResult<N> {
let string = try!(as_string(bytes));
match string.parse::<N>() {
Ok(num) => Ok(num),
Err(_) => Err(TcpTransportError::NumberParseError),
}
}
#[cfg(test)]
mod tests {
use tcp_transport::TcpTransportError;
use super::as_number;
use super::as_string;
    #[test]
    fn test_as_string() {
        // bytestring is utf8
        let st = as_string(vec![b'a', b'b']).unwrap();
        assert_eq!(st, "ab".to_string());
// bytestring is not utf8
let err = as_string(vec![b'a', 254, b'b']).unwrap_err();
assert_eq!(err, TcpTransportError::Utf8Error);
}
#[test]
fn test_as_number() {
// bytestring is a number
let num = as_number::<u64>(vec![b'1', b'2']).unwrap();
assert_eq!(num, 12);
// bytestring is not a number
let err = as_number::<u64>(vec![b' ', b'1', b'2']).unwrap_err();
assert_eq!(err, TcpTransportError::NumberParseError);
}
}

--- vector_long16.rs ---

use std;
use ::*;
impl Vector for long16 {
type Scalar = i64;
type Boolean = long16;
type CharVector = char16;
type ShortVector = short16;
type IntVector = int16;
type LongVector = long16;
type UCharVector = uchar16;
type UShortVector = ushort16;
type UIntVector = uint16;
type ULongVector = ulong16;
type FloatVector = float16;
type DoubleVector = double16;
#[inline(always)]
fn map_unary(self, f: &Fn(Self::Scalar) -> Self::Scalar) -> Self {
return long16(f(self.0), f(self.1), f(self.2), f(self.3), f(self.4), f(self.5), f(self.6), f(self.7), f(self.8), f(self.9), f(self.10), f(self.11), f(self.12), f(self.13), f(self.14), f(self.15));
}
#[inline(always)]
fn map_binary(self, other: Self, f: &Fn(Self::Scalar, Self::Scalar) -> Self::Scalar) -> Self {
return long16(f(self.0, other.0), f(self.1, other.1), f(self.2, other.2), f(self.3, other.3), f(self.4, other.4), f(self.5, other.5), f(self.6, other.6), f(self.7, other.7), f(self.8, other.8), f(self.9, other.9), f(self.10, other.10), f(self.11, other.11), f(self.12, other.12), f(self.13, other.13), f(self.14, other.14), f(self.15, other.15));
    }

    #[inline(always)]
    fn reduce(self, f: &Fn(Self::Scalar, Self::Scalar) -> Self::Scalar) -> Self::Scalar {
return f(self.15, f(self.14, f(self.13, f(self.12, f(self.11, f(self.10, f(self.9, f(self.8, f(self.7, f(self.6, f(self.5, f(self.4, f(self.3, f(self.2, f(self.1, self.0)))))))))))))));
}
#[inline(always)]
fn abs(self) -> Self {
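        // Branchless two's-complement abs: `mask` is all ones in negative
        // lanes and all zeros elsewhere, so `(x ^ mask) - mask` negates
        // exactly the lanes that were negative.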
let mask = self >> 63;
return (self ^ mask) - mask;
}
#[inline(always)]
fn to_char_sat(self) -> char16 {
return long16::to_char(self.clamp(Self::broadcast(std::i8::MIN as i64), Self::broadcast(std::i8::MAX as i64)));
}
#[inline(always)]
fn to_uchar_sat(self) -> uchar16 {
return long16::to_uchar(self.clamp(Self::broadcast(std::u8::MIN as i64), Self::broadcast(std::u8::MAX as i64)));
}
#[inline(always)]
fn to_short_sat(self) -> short16 {
return long16::to_short(self.clamp(Self::broadcast(std::i16::MIN as i64), Self::broadcast(std::i16::MAX as i64)));
}
#[inline(always)]
fn to_ushort_sat(self) -> ushort16 {
return long16::to_ushort(self.clamp(Self::broadcast(std::u16::MIN as i64), Self::broadcast(std::u16::MAX as i64)));
}
#[inline(always)]
fn to_int_sat(self) -> int16 {
return long16::to_int(self.clamp(Self::broadcast(std::i32::MIN as i64), Self::broadcast(std::i32::MAX as i64)));
}
#[inline(always)]
fn to_uint_sat(self) -> uint16 {
return long16::to_uint(self.clamp(Self::broadcast(std::u32::MIN as i64), Self::broadcast(std::u32::MAX as i64)));
}
#[inline(always)]
fn to_long_sat(self) -> long16 {
return self;
}
#[inline(always)]
fn to_ulong_sat(self) -> ulong16 {
return long16::to_ulong(self.max(Self::from(0)));
}
}
impl Dot<long16> for long16 {
type DotProduct = i64;
#[inline(always)]
fn dot(self, other: Self) -> Self::DotProduct {
return reduce_add(self * other);
}
}
impl Integer for long16 {
type IntegerScalar = i64;
const SIGN_MASK: i64 = std::i64::MIN;
}
impl Select<long16> for long16 {
const MASK_SHIFT: i64 = 63;
#[inline(always)]
fn bitselect(self, a: long16, b: long16) -> long16 {
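        // Bitwise select: take bits from `b` where `self` has ones and
        // from `a` where it has zeros.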
return (a & !self) | (b & self);
}
}
impl Select<ulong16> for long16 {
const MASK_SHIFT: i64 = 63;
#[inline(always)]
fn bitselect(self, a: ulong16, b: ulong16) -> ulong16 {
return ulong16::bitcast(self.bitselect(long16::bitcast(a), long16::bitcast(b)));
}
}
impl Select<double16> for long16 {
const MASK_SHIFT: i64 = 63;
#[inline(always)]
fn bitselect(self, a: double16, b: double16) -> double16 {
return double16::bitcast(self.bitselect(long16::bitcast(a), long16::bitcast(b)));
}
}
impl long16 {
#[inline(always)]
pub fn lo(self) -> long8 {
return long8(self.0, self.1, self.2, self.3, self.4, self.5, self.6, self.7);
}
#[inline(always)]
pub fn hi(self) -> long8 {
return long8(self.8, self.9, self.10, self.11, self.12, self.13, self.14, self.15);
}
#[inline(always)]
pub fn odd(self) -> long8 {
return long8(self.1, self.3, self.5, self.7, self.9, self.11, self.13, self.15);
}
#[inline(always)]
pub fn even(self) -> long8 {
return long8(self.0, self.2, self.4, self.6, self.8, self.10, self.12, self.14);
}
}<|fim▁end|> |
#[inline(always)] |

--- module.py ---

#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright (C) 2009-2012:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
# This Class is a plugin for the Shinken Broker. It is in charge
# to brok information of the service perfdata into the file
# var/service-perfdata
# So it just manage the service_check_return
# Maybe one day host data will be useful too
# It will need just a new file, and a new manager :)
import codecs
from shinken.basemodule import BaseModule
properties = {
'daemons': ['broker'],
'type': 'service_perfdata',
'phases': ['running'],
}
# called by the plugin manager to get a broker
def get_instance(plugin):
print "Get a Service Perfdata broker for plugin %s" % plugin.get_name()
# Catch errors
path = plugin.path
if hasattr(plugin, 'mode'):
mode = plugin.mode
else:
mode = 'a'
if hasattr(plugin, 'template'):
template = plugin.template
else:
template = "$LASTSERVICECHECK$\t$HOSTNAME$\t$SERVICEDESC$\t$SERVICEOUTPUT$\t$SERVICESTATE$\t$SERVICEPERFDATA$\n"
# int(data['last_chk']),data['host_name'], data['service_description'], data['output'], current_state, data['perf_data']
instance = Service_perfdata_broker(plugin, path, mode, template)
return instance
# Class for the Merlindb Broker
# Get broks and puts them in merlin database
class Service_perfdata_broker(BaseModule):
def __init__(self, modconf, path, mode, template):
BaseModule.__init__(self, modconf)
self.path = path
self.mode = mode
self.template = template
# Make some raw change
self.template = self.template.replace(r'\t', '\t')
self.template = self.template.replace(r'\n', '\n')
# In Nagios it's said to force a return in line
if not self.template.endswith('\n'):
self.template += '\n'
self.buffer = []
# Called by Broker so we can do init stuff
# TODO: add conf param to get pass with init
# Conf from arbiter!
def init(self):
print "[%s] I open the service-perfdata file '%s'" % (self.name, self.path)
# Try to open the file to be sure we can
self.file = codecs.open(self.path, self.mode, "utf-8")
self.file.close()
# We've got a 0, 1, 2 or 3 (or something else? ->3
# And want a real OK, WARNING, CRITICAL, etc...
def resolve_service_state(self, state):
states = {0: 'OK', 1: 'WARNING', 2: 'CRITICAL', 3: 'UNKNOWN'}
if state in states:
return states[state]
else:
return 'UNKNOWN'
# A service check have just arrived, we UPDATE data info with this
def manage_service_check_result_brok(self, b):
data = b.data
# The original model
# "$TIMET\t$HOSTNAME\t$SERVICEDESC\t$OUTPUT\t$SERVICESTATE\t$PERFDATA\n"
current_state = self.resolve_service_state(data['state_id'])
macros = {
'$LASTSERVICECHECK$': int(data['last_chk']),
'$HOSTNAME$': data['host_name'],
'$SERVICEDESC$': data['service_description'],
'$SERVICEOUTPUT$': data['output'],
'$SERVICESTATE$': current_state,
'$SERVICEPERFDATA$': data['perf_data'],
'$LASTSERVICESTATE$': data['last_state'],
}
s = self.template
for m in macros:
#print "Replacing in %s %s by %s" % (s, m, str(macros[m]))
s = s.replace(m, unicode(macros[m]))
#s = "%s\t%s\t%s\t%s\t%s\t%s\n" % (int(data['last_chk']),data['host_name'], \
# data['service_description'], data['output'], \
# current_state, data['perf_data'] )
self.buffer.append(s)
# Each second the broker say it's a new second. Let use this to
# dump to the file
def hook_tick(self, brok):
# Go to write it :)
buf = self.buffer
self.buffer = []
try:
self.file = codecs.open(self.path, self.mode, "utf-8")
for s in buf:
self.file.write(s)
self.file.flush()
self.file.close()
except IOError, exp: # Maybe another tool is just getting it, pass
            pass
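
To see what one flushed perfdata line looks like, here is a standalone sketch
of the macro substitution performed in manage_service_check_result_brok above
(the broker values are invented for illustration, and str() stands in for the
method's unicode() call):

    template = ("$LASTSERVICECHECK$\t$HOSTNAME$\t$SERVICEDESC$\t"
                "$SERVICEOUTPUT$\t$SERVICESTATE$\t$SERVICEPERFDATA$\n")
    macros = {
        '$LASTSERVICECHECK$': 1325376000,  # invented sample data
        '$HOSTNAME$': 'web01',
        '$SERVICEDESC$': 'HTTP',
        '$SERVICEOUTPUT$': 'HTTP OK - 0.02s response time',
        '$SERVICESTATE$': 'OK',
        '$SERVICEPERFDATA$': 'time=0.02s',
    }
    s = template
    for m in macros:
        s = s.replace(m, str(macros[m]))
    # s == "1325376000\tweb01\tHTTP\tHTTP OK - 0.02s response time\tOK\ttime=0.02s\n"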

--- borrowed-ptr-pattern-infallible.rs ---

// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
let (&x, &y, &z) = (&3, &'a', &@"No pets!");
assert!(x == 3);
assert!(y == 'a');
assert!(z == @"No pets!");
}

--- display.rs ---

use std::fmt::{Display, Error, Formatter, Write};
use super::{Document, KeyMarkup, StringValue, TableKeyMarkup, BoolValue};
use super::{ValueRef, Container, DirectChild, InlineArray, FloatValue};
use super::{ContainerKind, InlineTable, IntegerValue, DatetimeValue, ValueMarkup};
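// Writes `values` separated by `sep`, peeking ahead so that no trailing
// separator is emitted after the last item.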
fn fmt_join<'a, T, I>(f: &mut Formatter, values: I, sep: &str)
-> Result<(), Error> where T: Display, I:Iterator<Item=T>{
let mut values = values.peekable();
loop {
let value = values.next();
match value {
Some(value) => {
try!(write!(f, "{}", value));
if values.peek().is_some() {
try!(write!(f, "{}", sep));
}
}
None => break,
}
}
Ok(())
}
fn fmt_with_markup<T>(f: &mut Formatter, value: T, markup: &ValueMarkup)
-> Result<(), Error> where T: Display {
write!(f,
"{}{}{}",
markup.get_leading_trivia(),
value,
markup.get_trailing_trivia())
}
impl Display for Document {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
fmt_join(f, self.iter_children(), "")
.and_then(|_| fmt_join(f, self.iter_containers(), ""))
.and_then(|_| write!(f, "{}", self.get_trailing_trivia()))
}
}
impl Display for DirectChild {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
write!(f, "{}={}", self.key(), self.value())
}
}
impl<'a> Display for ValueRef<'a> {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match *self {
ValueRef::String(node) => node.fmt(f),
ValueRef::Integer(node) => node.fmt(f),
ValueRef::Float(node) => node.fmt(f),
ValueRef::Boolean(node) => node.fmt(f),
ValueRef::Datetime(node) => node.fmt(f),
ValueRef::Array(arr) => arr.fmt(f),
ValueRef::Table(table) => table.fmt(f),
}
}
}
impl Display for StringValue {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
fmt_with_markup(f, self.raw(), self.markup())
}
}
impl Display for IntegerValue {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
fmt_with_markup(f, self.raw(), self.markup())
}
}
impl Display for BoolValue {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
fmt_with_markup(f, self.get(), self.markup())
}
}
impl Display for DatetimeValue {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
fmt_with_markup(f, self.get(), self.markup())
}
}
impl Display for FloatValue {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
fmt_with_markup(f, self.raw(), self.markup())
}
}
impl Display for InlineArray {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
try!(write!(f, "{}[", self.markup().get_leading_trivia()));
try!(fmt_join(f, self.iter(), ","));
try!(write!(f, "{}", self.markup().get_comma_trivia()));
try!(write!(f, "]{}", self.markup().get_trailing_trivia()));
Ok(())
}
}
impl Display for InlineTable {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
try!(write!(f, "{}{{", self.markup().get_leading_trivia()));
try!(fmt_join(f, self.iter(), ","));
try!(write!(f, "}}{}", self.markup().get_trailing_trivia()));
Ok(())
}
}
impl<'a> Display for KeyMarkup {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
write!(f,
"{}{}{}",
self.get_leading_trivia(),
self.raw(),
self.get_trailing_trivia())
}
}
impl<'a> Display for TableKeyMarkup {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
write!(f,
"{}{}{}",
self.get_leading_trivia(),
self.raw(),
self.get_trailing_trivia())
}
}
impl Display for Container {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
let is_array = match self.kind {
ContainerKind::ArrayMember => true,
ContainerKind::Table => false
};
try!(write!(f, "{}", self.keys().get_leading_trivia()));
if is_array {
try!(write!(f, "[["));
} else {
try!(write!(f, "["));
}
try!(fmt_join(f, self.keys().markup().iter(), "."));
if is_array {
try!(write!(f, "]]"));
} else {
            try!(write!(f, "]"));
        }
try!(write!(f, "{}", self.keys().get_trailing_trivia()));
fmt_join(f, self.iter_children(), "")
}
}

--- multicam.py ---

import bpy
from .utils import MultiCamContext
class MultiCamFadeError(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return repr(self.msg)
class BlendObj(object):
def __init__(self, **kwargs):
self.children = set()
p = self.parent = kwargs.get('parent')
if p is not None:
kwargs.setdefault('context', p.context)
self.context = kwargs.get('context')
self.blend_obj = kwargs.get('blend_obj')
if hasattr(self.__class__, 'fcurve_property'):
self.fcurve_property = self.__class__.fcurve_property
if not hasattr(self, 'fcurve_property'):
self.fcurve_property = kwargs.get('fcurve_property')
@property
def blend_obj(self):
return getattr(self, '_blend_obj', None)
@blend_obj.setter
def blend_obj(self, value):
old = self.blend_obj
if value == old:
return
self._blend_obj = value
self.on_blend_obj_set(value, old)
def on_blend_obj_set(self, new, old):
self._fcurve = None
@property
def context(self):
context = getattr(self, '_context', None)
if context is None:
context = bpy.context
return context
@context.setter
def context(self, value):
old = getattr(self, '_context', None)
if old == value:
return
self._context = value
self.on_context_set(value, old)
def on_context_set(self, new, old):
self._fcurve = None
for obj in self.children:
obj.context = new
@property
def fcurve(self):
fc = getattr(self, '_fcurve', None)
if fc is None:
fc = self._fcurve = self.get_fcurve()
return fc
def get_fcurve(self):
path = self.blend_obj.path_from_id()
action = self.context.scene.animation_data.action
if action is None:
return None
prop = self.fcurve_property
for fc in action.fcurves.values():
if path not in fc.data_path:
continue
if fc.data_path.split('.')[-1] != prop:
continue
return fc
def remove_fcurve(self):
if self.fcurve is None:
return
action = self.context.scene.animation_data.action
action.fcurves.remove(self.fcurve)
self._fcurve = None
def iter_keyframes(self):
for kf in self.fcurve.keyframe_points.values():
yield kf.co
def insert_keyframe(self, frame, value, prop=None, **kwargs):
if prop is None:
prop = self.fcurve_property
if self.fcurve is None:
self.blend_obj.keyframe_insert(prop, frame=frame)
kf = self.get_keyframe(frame)
kf.co[1] = value
else:
kf = self.fcurve.keyframe_points.insert(frame, value)
for key, val in kwargs.items():
setattr(kf, key, val)
return kf
def get_keyframe(self, frame):
for kf in self.fcurve.keyframe_points.values():
if kf.co[0] == frame:
return kf
def add_child(self, cls, **kwargs):
kwargs.setdefault('parent', self)
obj = cls(**kwargs)
self.children.add(obj)
return obj
def del_child(self, obj):
self.children.discard(obj)
class MultiCam(BlendObj):
fcurve_property = 'multicam_source'
def __init__(self, **kwargs):
super(MultiCam, self).__init__(**kwargs)
self.mc_fader = self.add_child(MultiCamFade)
self.cuts = {}
self.strips = {}
def bake_strips(self):
if not len(self.cuts):
self.build_cuts()
self.build_strip_keyframes()
self.blend_obj.mute = True
def build_cuts(self):
for frame, channel in self.iter_keyframes():
self.cuts[frame] = channel
if channel not in self.strips:
self.get_strip_from_channel(channel)
def build_fade(self, fade=None, frame=None):
if fade is None and frame is not None:
fade = self.mc_fader.build_fade(frame)
if fade is None:
return
for channel in range(1, self.blend_obj.channel):
if channel not in self.strips:
self.get_strip_from_channel(channel)
if channel not in self.strips:
continue
self.strips[channel].build_fade(fade)
def build_fades(self):
self.mc_fader.build_fades()
def build_strip_keyframes(self):
for strip in self.strips.values():
strip.build_keyframes()
def get_strip_from_channel(self, channel):
for s in self.context.scene.sequence_editor.sequences:
if s.channel == channel:
source = self.add_child(MulticamSource, blend_obj=s)
self.strips[channel] = source
return source
class MultiCamFade(BlendObj):
def __init__(self, **kwargs):
self.multicam = kwargs.get('parent', kwargs.get('multicam'))
self.fade_props = {}
self.fades = {}
super(MultiCamFade, self).__init__(**kwargs)
if self.blend_obj is None:
self.blend_obj = self.get_fade_prop_group()
def on_blend_obj_set(self, new, old):
for prop in self.fade_props.values():
self.del_child(prop)
self.fade_props.clear()
self.fades.clear()
if new is None:
return
self.get_fade_props()
def get_fade_prop_group(self):
mc_data_path = self.multicam.blend_obj.path_from_id()
return self.context.scene.multicam_fader_properties.get(mc_data_path)
def get_fade_props(self):
action = self.context.scene.animation_data.action
group_name = 'Multicam Fader (%s)' % (self.multicam.blend_obj.name)
group = action.groups.get(group_name)
for fc in group.channels:
key = fc.data_path.split('.')[-1]
fade_prop = self.add_child(MultiCamFadeProp, fcurve_property=key)
self.fade_props[key] = fade_prop
def build_fade(self, frame):
self.build_fades(frame)
return self.fades.get(frame)
def build_fades(self, fade_frame=None):
prop_iters = {}
for key, prop in self.fade_props.items():
prop_iters[key] = prop.iter_keyframes()
def find_next_fade(frame=None):
prop_vals = {'start':{}, 'end':{}}
start_frame = None
try:
for key, prop in prop_iters.items():
frame, value = next(prop)
if start_frame is None:
start_frame = frame
elif frame != start_frame:
raise MultiCamFadeError('keyframes are not aligned: %s' % ({'frame':frame, 'prop_vals':prop_vals}))
prop_vals['start'][key] = value
except StopIteration:
return None, None, None
end_frame = None
for key, prop in prop_iters.items():
frame, value = next(prop)
if end_frame is None:
end_frame = frame
elif frame != end_frame:
raise MultiCamFadeError('keyframes are not aligned: %s' % ({'frame':frame, 'prop_vals':prop_vals}))
prop_vals['end'][key] = value
return start_frame, end_frame, prop_vals
while True:
need_update = False
start_frame, end_frame, prop_vals = find_next_fade()
if start_frame is None:
break
if fade_frame is not None and fade_frame != start_frame:
continue
            d = {
                'start_frame':start_frame,
                'end_frame':end_frame,
                'start_source':prop_vals['start']['start_source'],
                'next_source':prop_vals['start']['next_source'],
}
if start_frame not in self.fades:
need_update = True
self.fades[start_frame] = d
else:
for key, val in self.fades[start_frame].items():
if d[key] != val:
need_update = True
self.fades[start_frame][key] = d[key]
if need_update:
self.multicam.build_fade(d)
if fade_frame is not None:
break
class MultiCamFadeProp(BlendObj):
def __init__(self, **kwargs):
super(MultiCamFadeProp, self).__init__(**kwargs)
self.blend_obj = self.parent.blend_obj
class MulticamSource(BlendObj):
fcurve_property = 'blend_alpha'
def __init__(self, **kwargs):
super(MulticamSource, self).__init__(**kwargs)
self.multicam = self.parent
self.mc_fader = self.multicam.mc_fader
self._keyframe_data = None
@property
def keyframe_data(self):
d = self._keyframe_data
if d is None:
d = self._keyframe_data = self.build_keyframe_data()
return d
def build_keyframe_data(self):
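        # Walk the multicam cuts in frame order and record, for this strip's
        # channel, the frames where it becomes the active source (True) and
        # where it stops being active (False).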
d = {}
cuts = self.multicam.cuts
channel = self.blend_obj.channel
is_active = False
is_first_keyframe = True
for frame in sorted(cuts.keys()):
cut = cuts[frame]
if cut == channel:
d[frame] = True
is_active = True
elif is_active:
d[frame] = False
is_active = False
elif is_first_keyframe:
d[frame] = False
is_first_keyframe = False
return d
def build_fade(self, fade):
channel = self.blend_obj.channel
start_frame = fade['start_frame']
end_frame = fade['end_frame']
start_ch = fade['start_source']
end_ch = fade['next_source']
if channel < min([start_ch, end_ch]):
## this strip won't be affected
return
if start_ch == channel:
if end_ch < channel:
values = [1., 0.]
else:
values = [1., 1.]
elif end_ch == channel:
if start_ch < channel:
values = [0., 1.]
else:
values = [1., 1.]
elif channel > max([start_ch, end_ch]) or channel < max([start_ch, end_ch]):
values = [0., 0.]
else:
return
self.insert_keyframe(start_frame, values[0], interpolation='BEZIER')
self.insert_keyframe(end_frame, values[1], interpolation='CONSTANT')
self.insert_keyframe(end_frame+1, 1., interpolation='CONSTANT')
def build_fades(self):
for start_frame in sorted(self.mc_fader.fades.keys()):
fade = self.mc_fader.fades[start_frame]
self.build_fade(fade)
def build_keyframes(self):
self.remove_fcurve()
for frame, is_active in self.keyframe_data.items():
if is_active:
value = 1.
else:
value = 0.
self.insert_keyframe(frame, value, interpolation='CONSTANT')
class MultiCamBakeStrips(bpy.types.Operator, MultiCamContext):
'''Bakes the mulicam source into the affected strips using opacity'''
bl_idname = 'sequencer.bake_multicam_strips'
bl_label = 'Bake Multicam Strips'
def execute(self, context):
mc = MultiCam(blend_obj=self.get_strip(context),
context=context)
mc.bake_strips()
return {'FINISHED'}
def register():
bpy.utils.register_class(MultiCamBakeStrips)
def unregister():
    bpy.utils.unregister_class(MultiCamBakeStrips)
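
Once register() has run, the bake operator above is exposed through Blender's
operator registry under its bl_idname; with a multicam strip active in the
sequence editor it can be invoked from a script roughly like this (the
scene/selection setup is assumed):

    import bpy

    # Bakes the active multicam strip's cuts and fades into per-strip
    # blend_alpha keyframes (see MultiCamBakeStrips.execute above).
    bpy.ops.sequencer.bake_multicam_strips()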

--- DemoController.java ---

package com.yueny.demo.job.controller;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import com.yueny.demo.common.example.bo.ModifyDemoBo;
import com.yueny.demo.common.example.service.IDataPrecipitationService;
import lombok.extern.slf4j.Slf4j;
/**
* @author yueny09 <[email protected]>
*
 * @DATE 2016-02-16 20:23:11
 *
 */
@Controller
@Slf4j
public class DemoController {
@Autowired
private IDataPrecipitationService dataPrecipitationService;
/**
*
*/
@RequestMapping(value = { "/", "welcome" }, method = RequestMethod.GET)
@ResponseBody
public List<ModifyDemoBo> bar() {
try {
return dataPrecipitationService.queryAll();
} catch (final Exception e) {
log.error("exception:", e);
}
return null;
}
@RequestMapping(value = "/healthy", method = RequestMethod.GET)
@ResponseBody
public String healthy() {
return "OK";
}
}

--- lib.rs ---

pub mod one;
pub mod two;
pub mod ffigen;

//Integer marshaling
#[no_mangle]
pub extern fn test_u8(p: u8) -> u8 {
p
}
#[no_mangle]
pub extern fn test_u16(p: u16) -> u16 {
p
}
#[no_mangle]
pub extern fn test_u32(p: u32) -> u32 {
p
}
#[no_mangle]
pub extern fn test_i8(p: i8) -> i8 {
p
}
#[no_mangle]
pub extern fn test_i16(p: i16) -> i16 {
p
}
#[no_mangle]
pub extern fn test_i32(p: i32) -> i32 {
p
}
//Float marshaling
#[no_mangle]
pub extern fn test_f32(p: f32) -> f32 {
p
}
#[no_mangle]
pub extern fn test_f64(p: f64) -> f64 {
p
}
//Boolean marshaling
#[no_mangle]
pub extern fn test_bool(p: bool) -> bool {
p == true
}
//String marshaling
#[no_mangle]
pub extern fn test_string(p: String) -> String {
p.clone()
}
#[no_mangle]
pub extern fn test_string_ref(p: &String) -> String {
p.clone()
}
#[no_mangle]
pub extern fn test_str_ref(p: &str) -> String {
p.to_string()
}

--- GWTUserConfig.java ---

/**
 * OpenKM, Open Document Management System (http://www.openkm.com)
* Copyright (c) 2006-2017 Paco Avila & Josep Llort
* <p>
* No bytes were intentionally harmed during the development of this application.
* <p>
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* <p>
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* <p>
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package com.openkm.frontend.client.bean;
import com.google.gwt.user.client.rpc.IsSerializable;
/**
* @author jllort
*
*/
public class GWTUserConfig implements IsSerializable {
private String user = "";
private String homePath = "";
private String homeType = "";
private String homeNode = "";
/**
* GWTUserConfig
*/
public GWTUserConfig() {
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getHomePath() {
return homePath;
}
public void setHomePath(String homePath) {
this.homePath = homePath;
}
public String getHomeType() {
return homeType;
}
public void setHomeType(String homeType) {
this.homeType = homeType;
}
public String getHomeNode() {
return homeNode;
}
public void setHomeNode(String homeNode) {
this.homeNode = homeNode;
}
}

--- ru_RU.py ---

# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .base import * # noqa
# don't use a unicode string
localeID = 'ru_RU'
dateSep = ['-', '.']
timeSep = [':']
meridian = []
usesMeridian = False
uses24 = True
Weekdays = [
'понедельник', 'вторник', 'среда', 'четверг',
'пятница', 'суббота', 'воскресенье',
]
shortWeekdays = [
'пн', 'вт', 'ср', 'чт', 'пт', 'сб', 'вс',
]
# library does not know how to conjugate words
Months = [
'января', 'февраля', 'марта', 'апреля', 'мая', 'июня', 'июля',
'августа', 'сентября', 'октября', 'ноября', 'декабря',
]
shortMonths = [
'янв', 'фев', 'мрт', 'апр', 'май', 'июн',
'июл', 'авг', 'сен', 'окт', 'нбр', 'дек',
]
dateFormats = {
'full': 'EEEE, dd MMMM yyyy',
'long': 'dd MMMM yyyy',
'medium': 'dd-MM-yyyy',
'short': 'dd-MM-yy',
}
timeFormats = {
'full': 'HH:mm:ss v',
'long': 'HH:mm:ss z',
'medium': 'HH:mm:ss',
'short': 'HH:mm',
}
dp_order = ['d', 'm', 'y']
decimal_mark = '.'
units = {
'seconds': ['секунда', 'секунды', 'секунд', 'сек', 'с'],
'minutes': ['минута', 'минуты', 'минут', 'мин', 'м'],
'hours': ['час', 'часов', 'часа', 'ч'],
'days': ['день', 'дней', 'д'],
'weeks': ['неделя', 'недели', 'н'],
'months': ['месяц', 'месяца', 'мес'],
'years': ['год', 'года', 'годы', 'г'],
}
re_values = re_values.copy()
re_values.update({
'specials': 'om',
'timeseparator': ':',
'rangeseparator': '-',
'daysuffix': 'ого|ой|ий|тье',
'qunits': 'д|мес|г|ч|н|м|с',
'now': ['сейчас'],
})
Modifiers = {
'после': 1,
'назад': -1,
'предыдущий': -1,
'последний': -1,
'далее': 1,
'ранее': -1,
}
dayOffsets = {
'завтра': 1,
'сегодня': 0,
'вчера': -1,
'позавчера': -2,
'послезавтра': 2,
}
re_sources = {
'полдень': {'hr': 12, 'mn': 0, 'sec': 0},
'день': {'hr': 13, 'mn': 0, 'sec': 0},
'обед': {'hr': 12, 'mn': 0, 'sec': 0},
'утро': {'hr': 6, 'mn': 0, 'sec': 0},
'завтрак': {'hr': 8, 'mn': 0, 'sec': 0},
'ужин': {'hr': 19, 'mn': 0, 'sec': 0},
'вечер': {'hr': 18, 'mn': 0, 'sec': 0},
'полночь': {'hr': 0, 'mn': 0, 'sec': 0},
'ночь': {'hr': 21, 'mn': 0, 'sec': 0},
}
small = {
'ноль': 0,
'один': 1,
'два': 2,
'три': 3,
'четыре': 4,
'пять': 5,
'шесть': 6,
'семь': 7,
'восемь': 8,
'девять': 9,
'десять': 10,
'одиннадцать': 11,
'двенадцать': 12,
'тринадцать': 13,
'четырнадцать': 14,
'пятнадцать': 15,
'шестнадцать': 16,
'семнадцать': 17,
'восемнадцать': 18,
'девятнадцать': 19,
'двадцать': 20,
'тридцать': 30,
'сорок': 40,
'пятьдесят': 50,
'шестьдесят': 60,
'семьдесят': 70,
'восемьдесят': 80,
'девяносто': 90,
}
numbers = {
'ноль': 0,
'один': 1,
'два': 2,
'три': 3,
'четыре': 4,
'пять': 5,
'шесть': 6,
'семь': 7,
'восемь': 8,
'девять': 9,
'десять': 10,
'одиннадцать': 11,
'двенадцать': 12,
'тринадцать': 13,
'четырнадцать': 14,
'пятнадцать': 15,
'шестнадцать': 16,
    'семнадцать': 17,
    'восемнадцать': 18,
'девятнадцать': 19,
'двадцать': 20,
}
magnitude = {
'тысяча': 1000,
'миллион': 1000000,
'миллиард': 1000000000,
'триллион': 1000000000000,
'квадриллион': 1000000000000000,
'квинтиллион': 1000000000000000000,
'секстиллион': 1000000000000000000000,
'септиллион': 1000000000000000000000000,
'октиллион': 1000000000000000000000000000,
'нониллион': 1000000000000000000000000000000,
'дециллион': 1000000000000000000000000000000000,
}

--- regress-474771.js ---

/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
var gTestfile = 'regress-474771.js';
//-----------------------------------------------------------------------------
var BUGNUMBER = 474771;
var summary = 'TM: do not halt execution with gczeal, prototype mangling, for..in';
var actual = '';
var expect = '';
//-----------------------------------------------------------------------------
test();
//-----------------------------------------------------------------------------
function test()
{
enterFunc ('test');
printBugNumber(BUGNUMBER);
printStatus (summary);
expect = 'PASS';
jit(true);
if (typeof gczeal != 'undefined')
{
gczeal(2);
}
Object.prototype.q = 3;
for each (let x in [6, 7]) { } print(actual = "PASS");
jit(false);
delete Object.prototype.q;
reportCompare(expect, actual, summary);
exitFunc ('test');
}

--- cover.py ---

"""Support for MySensors covers."""
from homeassistant.components import mysensors
from homeassistant.components.cover import ATTR_POSITION, DOMAIN, CoverEntity
from homeassistant.const import STATE_OFF, STATE_ON
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the mysensors platform for covers."""
mysensors.setup_mysensors_platform(
hass,
DOMAIN,
discovery_info,
MySensorsCover,
        async_add_entities=async_add_entities,
    )


class MySensorsCover(mysensors.device.MySensorsEntity, CoverEntity):
"""Representation of the value of a MySensors Cover child node."""
@property
def assumed_state(self):
"""Return True if unable to access real state of entity."""
return self.gateway.optimistic
@property
def is_closed(self):
"""Return True if cover is closed."""
set_req = self.gateway.const.SetReq
if set_req.V_DIMMER in self._values:
return self._values.get(set_req.V_DIMMER) == 0
return self._values.get(set_req.V_LIGHT) == STATE_OFF
@property
def current_cover_position(self):
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
set_req = self.gateway.const.SetReq
return self._values.get(set_req.V_DIMMER)
async def async_open_cover(self, **kwargs):
"""Move the cover up."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_UP, 1, ack=1
)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
if set_req.V_DIMMER in self._values:
self._values[set_req.V_DIMMER] = 100
else:
self._values[set_req.V_LIGHT] = STATE_ON
self.async_write_ha_state()
async def async_close_cover(self, **kwargs):
"""Move the cover down."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_DOWN, 1, ack=1
)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
if set_req.V_DIMMER in self._values:
self._values[set_req.V_DIMMER] = 0
else:
self._values[set_req.V_LIGHT] = STATE_OFF
self.async_write_ha_state()
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
position = kwargs.get(ATTR_POSITION)
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_DIMMER, position, ack=1
)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
self._values[set_req.V_DIMMER] = position
self.async_write_ha_state()
async def async_stop_cover(self, **kwargs):
"""Stop the device."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_STOP, 1, ack=1
)<|fim▁end|> | )
|

--- forms.py ---

from crispy_forms.helper import FormHelper
from crispy_forms.layout import Fieldset, Layout
from django import forms
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError
from django.db import transaction
from django.forms import ModelForm
from django.utils.translation import ugettext_lazy as _
from django_filters import FilterSet
from easy_select2 import Select2
from crispy_layout_mixin import form_actions, to_row
from utils import (TIPO_TELEFONE, YES_NO_CHOICES, get_medicos,
get_or_create_grupo)
from .models import Especialidade, EspecialidadeMedico, Telefone, Usuario
class EspecialidadeMedicoFilterSet(FilterSet):
class Meta:
model = EspecialidadeMedico
fields = ['especialidade']
def __init__(self, *args, **kwargs):
super(EspecialidadeMedicoFilterSet, self).__init__(*args, **kwargs)
row1 = to_row([('especialidade', 12)])
self.form.helper = FormHelper()
self.form.helper.form_method = 'GET'
self.form.helper.layout = Layout(
Fieldset(_('Pesquisar Médico'),
row1, form_actions(save_label='Filtrar'))
)
class MudarSenhaForm(forms.Form):
nova_senha = forms.CharField(
label="Nova Senha", max_length=30,
widget=forms.PasswordInput(
attrs={'class': 'form-control form-control-lg',
'name': 'senha',
'placeholder': 'Nova Senha'}))
confirmar_senha = forms.CharField(
label="Confirmar Senha", max_length=30,
widget=forms.PasswordInput(
attrs={'class': 'form-control form-control-lg',
'name': 'confirmar_senha',
'placeholder': 'Confirmar Senha'}))
class LoginForm(AuthenticationForm):
username = forms.CharField(
label="Username", max_length=30,
widget=forms.TextInput(
attrs={'class': 'form-control form-control-lg',
'name': 'username',
'placeholder': 'Usuário'}))
password = forms.CharField(
label="Password", max_length=30,
widget=forms.PasswordInput(
attrs={'class': 'form-control',
'name': 'password',
'placeholder': 'Senha'}))
class UsuarioForm(ModelForm):
# Usuário
password = forms.CharField(
max_length=20,
label=_('Senha'),
widget=forms.PasswordInput())
password_confirm = forms.CharField(
max_length=20,
label=_('Confirmar Senha'),
widget=forms.PasswordInput())
class Meta:
model = Usuario
fields = ['username', 'email', 'nome', 'password', 'password_confirm',
'data_nascimento', 'sexo', 'plano', 'tipo', 'cep', 'end',
'numero', 'complemento', 'bairro', 'referencia',
'primeiro_telefone', 'segundo_telefone']
widgets = {'email': forms.TextInput(
attrs={'style': 'text-transform:lowercase;'})}
def __init__(self, *args, **kwargs):
super(UsuarioForm, self).__init__(*args, **kwargs)
self.fields['primeiro_telefone'].widget.attrs['class'] = 'telefone'
self.fields['segundo_telefone'].widget.attrs['class'] = 'telefone'
def valida_igualdade(self, texto1, texto2, msg):
if texto1 != texto2:
raise ValidationError(msg)
return True
def clean(self):
if ('password' not in self.cleaned_data or
'password_confirm' not in self.cleaned_data):
raise ValidationError(_('Favor informar senhas atuais ou novas'))
msg = _('As senhas não conferem.')
self.valida_igualdade(
self.cleaned_data['password'],
self.cleaned_data['password_confirm'],
msg)
try:
validate_password(self.cleaned_data['password'])
except ValidationError as error:
raise ValidationError(error)
return self.cleaned_data
@transaction.atomic
def save(self, commit=False):
usuario = super(UsuarioForm, self).save(commit)
# Cria User
u = User.objects.create(username=usuario.username, email=usuario.email)
u.set_password(self.cleaned_data['password'])
u.is_active = True
u.groups.add(get_or_create_grupo(self.cleaned_data['tipo'].descricao))
u.save()
usuario.user = u
usuario.save()
return usuario
class UsuarioEditForm(ModelForm):
# Primeiro Telefone
primeiro_tipo = forms.ChoiceField(
widget=forms.Select(),
choices=TIPO_TELEFONE,
label=_('Tipo Telefone'))
primeiro_ddd = forms.CharField(max_length=2, label=_('DDD'))
primeiro_numero = forms.CharField(max_length=10, label=_('Número'))
primeiro_principal = forms.TypedChoiceField(
widget=forms.Select(),
label=_('Telefone Principal?'),
choices=YES_NO_CHOICES)
# Primeiro Telefone
segundo_tipo = forms.ChoiceField(
required=False,
widget=forms.Select(),
choices=TIPO_TELEFONE,
label=_('Tipo Telefone'))
segundo_ddd = forms.CharField(required=False, max_length=2, label=_('DDD'))
segundo_numero = forms.CharField(
required=False, max_length=10, label=_('Número'))
segundo_principal = forms.ChoiceField(
required=False,
widget=forms.Select(),
label=_('Telefone Principal?'),
choices=YES_NO_CHOICES)
class Meta:
model = Usuario
fields = ['username', 'email', 'nome', 'data_nascimento', 'sexo',
'plano', 'tipo', 'cep', 'end', 'numero', 'complemento',
'bairro', 'referencia', 'primeiro_telefone',
'segundo_telefone']
widgets = {'username': forms.TextInput(attrs={'readonly': 'readonly'}),
'email': forms.TextInput(
attrs={'style': 'text-transform:lowercase;'}),
}
def __init__(self, *args, **kwargs):
super(UsuarioEditForm, self).__init__(*args, **kwargs)
self.fields['primeiro_telefone'].widget.attrs['class'] = 'telefone'
self.fields['segundo_telefone'].widget.attrs['class'] = 'telefone'
def valida_igualdade(self, texto1, texto2, msg):
if texto1 != texto2:
raise ValidationError(msg)
return True
def clean_primeiro_numero(self):
cleaned_data = self.cleaned_data
telefone = Telefone()
telefone.tipo = self.data['primeiro_tipo']
telefone.ddd = self.data['primeiro_ddd']
telefone.numero = self.data['primeiro_numero']
telefone.principal = self.data['primeiro_principal']
cleaned_data['primeiro_telefone'] = telefone
return cleaned_data
def clean_segundo_numero(self):
cleaned_data = self.cleaned_data
telefone = Telefone()
telefone.tipo = self.data['segundo_tipo']
telefone.ddd = self.data['segundo_ddd']
telefone.numero = self.data['segundo_numero']
telefone.principal = self.data['segundo_principal']
cleaned_data['segundo_telefone'] = telefone
return cleaned_data
@transaction.atomic
def save(self, commit=False):
usuario = super(UsuarioEditForm, self).save(commit)
# Primeiro telefone
tel = usuario.primeiro_telefone
tel.tipo = self.data['primeiro_tipo']
tel.ddd = self.data['primeiro_ddd']
tel.numero = self.data['primeiro_numero']
tel.principal = self.data['primeiro_principal']
tel.save()
usuario.primeiro_telefone = tel
# Segundo telefone
tel = usuario.segundo_telefone
if tel:
tel.tipo = self.data['segundo_tipo']
tel.ddd = self.data['segundo_ddd']
tel.numero = self.data['segundo_numero']
tel.principal = self.data['segundo_principal']
tel.save()
usuario.segundo_telefone = tel
# User
u = usuario.user
u.email = usuario.email
u.groups.remove(u.groups.first())
        u.groups.add(get_or_create_grupo(self.cleaned_data['tipo'].descricao))
        u.save()
        usuario.save()
        return usuario

class EspecialidadeMedicoForm(ModelForm):
medico = forms.ModelChoiceField(
queryset=get_medicos(),
widget=Select2(select2attrs={'width': '535px'}))
especialidade = forms.ModelChoiceField(
queryset=Especialidade.objects.all(),
widget=Select2(select2attrs={'width': '535px'}))
class Meta:
model = EspecialidadeMedico
        fields = ['especialidade', 'medico']

--- Errors.py ---

class RepeatError(ValueError):
    pass


class NoneError(ValueError):
    pass

--- calculate_energy_new_coords_general.py ---

import os,sys
from trans_rot_coords import *
import numpy as np
from read_energy_force_new import *
from grids_structures_general import DS,Grid_Quarts
from orient_struct_2 import OrientDS as OrientDS_2
from orient_struct_3 import OrientDS as OrientDS_3
AU2KCAL = 23.0605*27.2116
R2D = 180.0/3.14159265358979
## np.pi/4.0:
pi4 = 0.78539816339744817
tMass = [15.999, 1.008, 1.008]
def get_com(coords):
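    # Center of mass of a three-atom (O, H, H) fragment, weighted by tMass.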
x = [0,0,0]
totalM = 0
for i in range(len(coords)):
x = [ x[k]+ coords[i][k]*tMass[i] for k in range(3)]
totalM += tMass[i]
x = [x[k]/totalM for k in range(3)]
return x
def norm_prob(config,ndx,prob='wtr'):
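    # Unit normal of the plane spanned by the three probe atoms selected by ndx.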
if prob=='wtr':
v1 = np.array(config[ndx[1]]) - np.array(config[ndx[0]])
v2 = np.array(config[ndx[2]]) - np.array(config[ndx[0]])
vec = get_normal_unit(v1,v2)
return vec
class new_atom():
def __init__(self, line, ftype='gjf'):
if ftype=='gjf': self.addgjf(line)
elif ftype=='gms': self.addinp(line)
elif ftype=='pdb': self.addpdb(line)
def addgjf(self, line):
line = line.split()
self.a_nam = line[0]
self.x = [float(line[1]), float(line[2]), float(line[3])]
def addpdb(self, line):
self.line = line
self.i_atm = int(line[6:11])
self.a_nam = line[11:16].strip()
self.a_res = line[16:20].strip()
self.a_chn = line[20:22].strip()
self.i_res = int(line[22:26])
self.x = []
self.x.append(float(line[30:38]))
self.x.append(float(line[38:46]))
self.x.append(float(line[46:54]))
def addinp(self, line):
line = line.split()
self.a_nam = line[0]
self.x = [float(line[2]), float(line[3]), float(line[4])]
class coordinates():
def __init__(self, n1, n2, FragType, name=''):
## n1,n2 is the number of atoms in mole1 and mole2:
self.n1 = n1
self.n2 = n2
## records of operations of translation and rotation:
self.OperateNdx = []
self.Operation = []
## fragment type:
self.FT = FragType
## symmetry faces:
self.symface = DS[self.FT].symface
self.IsOriented = False
self.facendx = {'yx':2, 'xy':2,
'yz':0, 'zy':0,
'zx':1, 'xz':1,
'zarg':5,
'zben':6}
self.symm = [1,1,1]
self.center = 0
self.natoms = 0
self.original_atoms = []
self.name = name
def addatom(self, line, ftype='pdb'):
temp = new_atom(line, ftype)
self.original_atoms.append(temp)
self.natoms += 1
def addpdbatom(self, line):
self.original_atoms.append(new_atom(line, 'pdb'))
self.natoms += 1
def set_atom(self, i, atom):
if i>=len(self.original_atoms):
self.original_atoms.append( deepcopy(atom) )
self.natoms += 1
else: self.original_atoms[i] = deepcopy(atom)
def MirrorAll(self):
"""
According to the coords of the 1st atom in mole2.
"""
self.orignal_com = deepcopy(self.center2)
for face in self.symface:
fndx = self.facendx[face]
if self.center2[fndx] < 0.0:
self.symm[ fndx ] = -1
for i in range(self.n1, self.natoms):
self.atoms[i].x[fndx] *= -1
self._spherical_x()
def MirrorBackProperty(self):
for face in self.symface:
fndx = self.facendx[face]
if self.orignal_com[fndx] < 0.0:
self.symm[ fndx ] = -1
self.force[fndx] *= -1
for i in range(3):
if not i == fndx:
self.torque[i] *= -1
def ReorientToOrigin(self, cut=0.0000001):
self.atoms = deepcopy(self.original_atoms)
coord1 = get_com([self.atoms[0].x, self.atoms[1].x, self.atoms[2].x ])
coord2 = get_com([self.atoms[3].x, self.atoms[4].x, self.atoms[5].x ])
self.origin_center_coord = get_unit([coord2[i] - coord1[i] for i in range(3)])
dvec = DS[self.FT].calt_dvec( self.atoms[0].x, self.atoms[1].x, self.atoms[2].x )
for i in range(self.natoms):
self.atoms[i].x = translate(self.atoms[i].x, dvec)
self.OperateNdx.append(0)
self.Operation.append(np.array(dvec))
vec, ax0 = DS[self.FT].calt_vec1( self.atoms[0].x, self.atoms[1].x, self.atoms[2].x )
ang = angle(vec, ax0)
ax = get_normal(vec, ax0)
if ax[0]==0.0 and ax[1]==0.0 and ax[2]==0.0: pass
else:
for i in range(self.natoms):
self.atoms[i].x = rotate(self.atoms[i].x, ax, ang)
self.OperateNdx.append(1)
self.Operation.append([ax, ang])
vec, ax0 = DS[self.FT].calt_vec2( self.atoms[0].x, self.atoms[1].x, self.atoms[2].x )
ang = angle(vec, ax0)
if abs(ang)<cut: pass
        else:
            ax = get_normal(vec, ax0)
            for i in range(self.natoms):
                self.atoms[i].x = rotate(self.atoms[i].x, ax, ang)
self.OperateNdx.append(2)
self.Operation.append([ax, ang])
self.IsOriented = True
self._spherical_x()
def ReorientToOldVec(self):
ax, ang = self.Operation[self.OperateNdx.index(2)]
self.force = rotate(self.force, ax, -1*ang)
self.torque = rotate(self.torque, ax, -1*ang)
ax, ang = self.Operation[self.OperateNdx.index(1)]
self.force = rotate(self.force, ax, -1*ang)
self.torque = rotate(self.torque, ax, -1*ang)
def _spherical_x(self):
"""
Calculate the coords in spherical coordination system for molecule 2.
"""
totalM = 0
x = [0,0,0]
for i in range(self.n1,self.natoms):
x = [ x[k]+self.atoms[i].x[k]*tMass[i-self.n1] for k in range(3)]
totalM += tMass[i-self.n1]
x = [x[k]/totalM for k in range(3)]
r = np.sqrt(x[0]*x[0]+x[1]*x[1]+x[2]*x[2])
#print "probe vector:", 4.0*x[0]/r, 4.0*x[1]/r, 4.0*x[2]/r
## phi of principal coords:
ang1 = np.pi*0.5 - np.arccos(x[2]/r)
## theta of principal coords (from -pi to pi):
if abs(x[0])<0.000001:
if x[1]>0: ang2 = np.pi*0.5
else: ang2 = np.pi*1.5
else:
ang2 = np.arctan(x[1]/x[0])
if x[0]<0: ang2 += np.pi
elif x[1]<0: ang2 += np.pi*2
self.r = r
self.ang1 = ang1
self.ang2 = ang2
self.center2 = x
def _spherical_orient(self):
"""
calculate the spherical coordinates for the orientational vector
"""
x = self.orientVec
r = length(x)
# phi, [-pi/2, pi/2]
ang1 = np.pi*0.5 - np.arccos(x[2]/r)
# theta, [0, 2*pi]
if abs(x[0])<0.000001:
if x[1]>0: ang2 = np.pi*0.5
else: ang2 = np.pi*1.5
else:
ang2 = np.arctan(x[1]/x[0])
if x[0]<0: ang2 += np.pi
elif x[1] <0: ang2 += np.pi*2
self.orient_ang1 = ang1
self.orient_ang2 = ang2
def indexing_orient_auto3(self,ri):
"""
find the index automatically for each subsection in which the orientational vector resides
"""
ang1 = self.orient_ang1
ang2 = self.orient_ang2
#print "<<<<<",ang1*R2D,ang2*R2D
OrientDS = self.OrientDS[ri]
#print "attention!!!"
#print OrientDS['wtr'].nGrid
if ang1<OrientDS['wtr'].PHI_angles[0] or ang1>OrientDS['wtr'].PHI_angles[-1]: ih = -1
for i in range(1,OrientDS['wtr'].nPhi):
if ang1 <= OrientDS['wtr'].PHI_angles[i]:
ih = i-1
break
ang1_ndx1 = ih
ang1_ndx2 = ih + 1
if ang1_ndx1 == OrientDS['wtr'].nPhi-2: # near the up vertex
ang1_ndx3 = ih -1
elif ang1_ndx1 == 0: # near the down vertex
ang1_ndx3 = ih + 2
else:
tmp1 = OrientDS['wtr'].PHI_angles[ih+2] - ang1
tmp2 = ang1 - OrientDS['wtr'].PHI_angles[ih-1]
if abs(tmp1) < abs(tmp2):
ang1_ndx3 = ih + 2
else:
ang1_ndx3 = ih - 1
phiList = [ang1_ndx1,ang1_ndx2,ang1_ndx3]
dgrids_sub_ndx = {}
dtheta_ndx = {}
# determine if use linear interpolation or use quadratic interpolation
if len(set(phiList)) == 2:
iflinear = 1
elif len(set(phiList)) == 3:
iflinear = 0
for kk in set(phiList):
dgrids_sub_ndx[kk] = []
dtheta_ndx[kk] = []
ip = -1
for i in range(1, OrientDS['wtr'].NTheta[kk]):
if ang2 <= OrientDS['wtr'].THETA_angles[kk][i]:
ip = i-1
break
if ip == -1: ip = OrientDS['wtr'].NTheta[kk]-1
#print kk, ip
ig = 0
for i in range(kk): ig += OrientDS['wtr'].NTheta[i]
ig += ip
dgrids_sub_ndx[kk].append(ig)
dtheta_ndx[kk].append(ip)
if ip == OrientDS['wtr'].NTheta[kk]-1:
if OrientDS['wtr'].NTheta[kk] == 1: #vertex
dgrids_sub_ndx[kk].append(ig)
dtheta_ndx[kk].append(0)
if iflinear == 0:
dgrids_sub_ndx[kk].append(ig)
dtheta_ndx[kk].append(0)
else:
dgrids_sub_ndx[kk].append(ig-OrientDS['wtr'].NTheta[kk]+1)
dtheta_ndx[kk].append(0+OrientDS['wtr'].NTheta[kk])
if iflinear == 0:
tmp1 = OrientDS['wtr'].THETA_angles[kk][1] - ang2 + 2*np.pi
tmp2 = ang2 - OrientDS['wtr'].THETA_angles[kk][ip-1]
if tmp1 < tmp2:
dgrids_sub_ndx[kk].append(ig-OrientDS['wtr'].NTheta[kk]+1+1)
dtheta_ndx[kk].append(0+OrientDS['wtr'].NTheta[kk]+1)
else:
dgrids_sub_ndx[kk].append(ig-1)
dtheta_ndx[kk].append(ip-1)
else:
dgrids_sub_ndx[kk].append(ig+1)
dtheta_ndx[kk].append(ip+1)
if iflinear == 0:
if ip+2 == OrientDS['wtr'].NTheta[kk]:
tmp1 = 2*np.pi - ang2
else:
tmp1 = OrientDS['wtr'].THETA_angles[kk][ip+2] - ang2
if ip == 0:
tmp2 = ang2 - OrientDS['wtr'].THETA_angles[kk][OrientDS['wtr'].NTheta[kk]-1] + 2*np.pi
else:
tmp2 = ang2 - OrientDS['wtr'].THETA_angles[kk][ip-1]
if tmp1 < tmp2:
if ip+2 == OrientDS['wtr'].NTheta[kk]:
dgrids_sub_ndx[kk].append(ig+1-OrientDS['wtr'].NTheta[kk]+1)
dtheta_ndx[kk].append(0+OrientDS['wtr'].NTheta[kk])
else:
dgrids_sub_ndx[kk].append(ig+2)
dtheta_ndx[kk].append(ip+2)
else:
if ip == 0:
dgrids_sub_ndx[kk].append(ig+OrientDS['wtr'].NTheta[kk]-1)
dtheta_ndx[kk].append(-1)
else:
dgrids_sub_ndx[kk].append(ig-1)
dtheta_ndx[kk].append(ip-1)
self.dgrids_sub_ndx[ri] = dgrids_sub_ndx
self.dtheta_ndx[ri] = dtheta_ndx
def indexing_auto3(self):
        if not self.IsOriented: raise Exception, "Error: indexing before reorientation."
r = self.r
ang1 = self.ang1
ang2 = self.ang2
#print "probe angles", ang1*R2D, ang2*R2D
## ndx of r:
ir = 10001
if r<DS[self.FT].R_NDX[0]: ir = -1
else:
for i in range(1,DS[self.FT].nDist):
if r<=DS[self.FT].R_NDX[i]:
ir = i-1
break
#print 'ir',ir
if ir>10000:
self.r_ndxs = [ir]
self.vbis = [0,0,0]
self.vnrm = [0,0,0]
self.dgrid_ndx_layer = {}
self.dtheta_ndx_layer = {}
return 10000,0,0
elif ir<0:
self.r_ndxs = [ir]
self.vbis = [0,0,0]
self.vnrm = [0,0,0]
self.dgrid_ndx_layer = {}
self.dtheta_ndx_layer = {}
return -1, 0,0
#print "r=%.1f"%r, ir
r_ndxs = [ir,ir+1]
# find 3 layers which are close to the query one
if ir == 0:
r_ndxs.append(ir+2)
elif ir == DS[self.FT].nDist -2:
r_ndxs.append(ir-1)
else:
tmp1 = r - DS[self.FT].R_NDX[ir-1]
tmp2 = DS[self.FT].R_NDX[ir+2] - r
if abs(tmp1) < abs(tmp2):
r_ndxs.append(ir-1)
else:
r_ndxs.append(ir+2)
## ndx of ang1 (Phi):
if ang1<DS[self.FT].PHI_angles[0]: ih = -1
for i in range(1, DS[self.FT].nPhi):
if ang1<=DS[self.FT].PHI_angles[i]:
ih = i-1
break
ang1_ndx1 = ih
ang1_ndx2 = ih + 1
if ang1_ndx1 == DS[self.FT].nPhi -2:
ang1_ndx3 = ih - 1
elif ang1_ndx1 == 0:
ang1_ndx3 = ih + 2
else:
tmp1 = DS[self.FT].PHI_angles[ih+2] - ang1
tmp2 = ang1 - DS[self.FT].PHI_angles[ih-1]
if tmp1 < tmp2:
ang1_ndx3 = ih+2
else:
ang1_ndx3 = ih-1
phiList = [ang1_ndx1,ang1_ndx2,ang1_ndx3]
dgrid_ndx_layer = {}
dtheta_ndx_layer = {}
# determine if use linear interpolation or use quadratic interpolation
if len(set(phiList)) == 2:
iflinear = 1
elif len(set(phiList)) == 3:
iflinear = 0
for kk in set(phiList):
dgrid_ndx_layer[kk] = []
dtheta_ndx_layer[kk] = []
## ndx_of_ang2 (Theta):
ip = -1
for i in range(1,DS[self.FT].NTheta[kk]):
if ang2<=DS[self.FT].THETA_angles[kk][i]:
ip = i-1
break
if ip==-1: ip = DS[self.FT].NTheta[kk]-1
ig = 0
for i in range(kk): ig += DS[self.FT].NTheta[i]
ig += ip
dgrid_ndx_layer[kk].append(ig)
dtheta_ndx_layer[kk].append(ip)
#print "check", kk, ip, ig
if ip == DS[self.FT].NTheta[kk]-1:
if DS[self.FT].NTheta[kk] == 1: #vertex
dgrid_ndx_layer[kk].append(ig)
dtheta_ndx_layer[kk].append(0)
if iflinear == 0:
dgrid_ndx_layer[kk].append(ig)
dtheta_ndx_layer[kk].append(0)
elif self.FT in ['cys','alc','bck','hid','trp','tyr','gln']:
dgrid_ndx_layer[kk].append(ig-DS[self.FT].NTheta[kk]+1)
dtheta_ndx_layer[kk].append(0+DS[self.FT].NTheta[kk])
if iflinear == 0:
tmp1 = DS[self.FT].THETA_angles[kk][1] - ang2 + 2*np.pi
tmp2 = ang2 - DS[self.FT].THETA_angles[kk][ip-1]
if tmp1 < tmp2:
dgrid_ndx_layer[kk].append(ig-DS[self.FT].NTheta[kk]+1+1)
dtheta_ndx_layer[kk].append(0+DS[self.FT].NTheta[kk]+1)
else:
dgrid_ndx_layer[kk].append(ig-1)
dtheta_ndx_layer[kk].append(ip-1)
else:
dgrid_ndx_layer[kk].append(ig-1)
dtheta_ndx_layer[kk].append(ip-1)
if iflinear == 0:
dgrid_ndx_layer[kk].append(ig-2)
dtheta_ndx_layer[kk].append(ip-2)
else:
dgrid_ndx_layer[kk].append(ig+1)
dtheta_ndx_layer[kk].append(ip+1)
if iflinear == 0:
if self.FT in ['cys','alc','bck','hid','trp','tyr','gln']:
if ip+2 == DS[self.FT].NTheta[kk]:
tmp1 = 2*np.pi -ang2
else:
tmp1 = DS[self.FT].THETA_angles[kk][ip+2] - ang2
if ip == 0:
tmp2 = ang2 - DS[self.FT].THETA_angles[kk][DS[self.FT].NTheta[kk]-1] + 2*np.pi
else:
tmp2 = ang2 - DS[self.FT].THETA_angles[kk][ip-1]
if tmp1 < tmp2:
if ip+2 == DS[self.FT].NTheta[kk]:
dgrid_ndx_layer[kk].append(ig+1-DS[self.FT].NTheta[kk]+1)
dtheta_ndx_layer[kk].append(0+DS[self.FT].NTheta[kk])
else:
dgrid_ndx_layer[kk].append(ig+2)
dtheta_ndx_layer[kk].append(ip+2)
else:
if ip == 0:
dgrid_ndx_layer[kk].append(ig+DS[self.FT].NTheta[kk]-1)
dtheta_ndx_layer[kk].append(-1)
else:
dgrid_ndx_layer[kk].append(ig-1)
dtheta_ndx_layer[kk].append(ip-1)
else:
if ip == DS[self.FT].NTheta[kk]-2:
dgrid_ndx_layer[kk].append(ig-1)
dtheta_ndx_layer[kk].append(ip-1)
elif ip == 0:
dgrid_ndx_layer[kk].append(ig+2)
dtheta_ndx_layer[kk].append(ip+2)
else:
tmp1 = DS[self.FT].THETA_angles[kk][ip+2] - ang2
tmp2 = ang2 - DS[self.FT].THETA_angles[kk][ip-1]
if tmp1 < tmp2:
dgrid_ndx_layer[kk].append(ig+2)
dtheta_ndx_layer[kk].append(ip+2)
else:
dgrid_ndx_layer[kk].append(ig-1)
dtheta_ndx_layer[kk].append(ip-1)
self.dgrid_ndx_layer = dgrid_ndx_layer
self.dtheta_ndx_layer = dtheta_ndx_layer
## calculate the vectors of bisector and normal of mole2:
a20 = self.atoms[self.n1].x
a21 = self.atoms[self.n1+1].x
a22 = self.atoms[self.n1+2].x
a20 = np.array(a20)
a21 = np.array(a21)
a22 = np.array(a22)
v0 = a21 - a20
v1 = a22 - a20
## These two vectors must be unit vector:
bisect = get_bisect_unit(v0,v1)
normal = get_normal_unit(v0,v1)
self.r_ndxs = r_ndxs
self.vbis = bisect
self.vnrm = normal
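        # Summary of the stencil prepared above (descriptive only): up to three
        # radial layers (r_ndxs), three phi rows and three theta columns per
        # layer, which calt_conf_energy later combines by quadratic Lagrange
        # interpolation whenever all three points are distinct.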
def calt_conf_energy(self, allconfigs, IsForce=False, ehigh=100.0):
ri_ndxs = self.r_ndxs
self.exit_before = False
for ri in ri_ndxs:
if ri>100:
self.properties = {'E':0.0}
return
elif ri<0:
                fvec = [self.origin_center_coord[i] * ehigh for i in range(3)]
                self.properties = {'E':ehigh, "Fx": fvec[0], "Fy": fvec[1], "Fz": fvec[2],
                                   "Tx": 0, "Ty": 0, "Tz": 0}
self.exit_before = True
return
bisv = self.vbis
nrmv = self.vnrm
dtheta_ndx_layer = self.dtheta_ndx_layer
grid_ndx_layer = []
for ih in self.dgrid_ndx_layer:
grid_ndx_layer += self.dgrid_ndx_layer[ih]
self.orientVec = bisv
#print "orient vector:%.5f\t%.5f\t%.5f\n"%(bisv[0]*4.0,bisv[1]*4.0,bisv[2]*4.0)
self._spherical_orient()
ang1 = self.orient_ang1
ang2 = self.orient_ang2
ang2 = (ang2*R2D+180)%360 #the original orientational vector of water is located at -x axis
ang2 = ang2/R2D
self.orient_ang2 = ang2
self.OrientDS = {}
self.orient_tr = {}
self.orient_pr = {}
self.dgrids_sub_ndx = {}
self.dtheta_ndx = {}
grids_sub_ndx = {}
dtheta_ndx = {}
wghx1 = {}
wghx2 = {}
wghy = {}
label = {}
for i in ri_ndxs:
dist = DS[self.FT].R_NDX[i] # choose corresponding orientational sampling based on distance
#print "which layer:", dist
if dist > 5.5000001:
cart_ndx, grids_sub_ndx_tmp, wghx_tmp, wghy_tmp = weights_in_subsection( bisv )
grids_sub_ndx[i] = grids_sub_ndx_tmp
wghx1[i] = wghx_tmp/pi4
wghx2[i] = wghx_tmp/pi4
wghy[i] = wghy_tmp/pi4
label[i] = 0
else:
if dist < 2.5000001:
OrientDS = OrientDS_2
elif dist > 2.5000001 and dist < 3.5000001:
OrientDS = OrientDS_3
else:
OrientDS = OrientDS_2
self.OrientDS[i] = OrientDS
self.indexing_orient_auto3(i)
dtheta_ndx[i] = self.dtheta_ndx[i]
if len(dtheta_ndx[i]) == 2: # not in this script
pass
#orient_pr =[]
#for kk in dtheta_ndx[i]:
# ip1=dtheta_ndx[i][kk][0]
# ip2=dtheta_ndx[i][kk][1]
# if ip1 == 0 and ip2 == 0: # vertex
# wtmp = 0
# elif ip1 == OrientDS['wtr'].NTheta[kk]-1:
# wtmp = (ang2-OrientDS['wtr'].THETA_angles[kk][ip1])/(2*np.pi+OrientDS['wtr'].THETA_angles[kk][0]-OrientDS['wtr'].THETA_angles[kk][ip1])
# else:
# wtmp = (ang2-OrientDS['wtr'].THETA_angles[kk][ip1])/(OrientDS['wtr'].THETA_angles[kk][ip2]-OrientDS['wtr'].THETA_angles[kk][ip1])
# orient_pr.append(wtmp)
#wghx1[i] = orient_pr[0]
#wghx2[i] = orient_pr[1]
#ihs = dtheta_ndx[i].keys()
#wghy[i] = (ang1 - OrientDS['wtr'].PHI_angles[ihs[0]])/(OrientDS['wtr'].PHI_angles[ihs[1]]-OrientDS['wtr'].PHI_angles[ihs[0]])
#label[i] = 1
##print "++++++",wghx1[i],wghx2[i],wghy[i]
#grids_sub_ndx[i] = self.dgrids_sub_ndx[i][ihs[0]] + self.dgrids_sub_ndx[i][ihs[1]]
if len(dtheta_ndx[i]) == 3:
ihs = dtheta_ndx[i].keys()
grids_sub_ndx[i] = self.dgrids_sub_ndx[i][ihs[0]] + self.dgrids_sub_ndx[i][ihs[1]] + self.dgrids_sub_ndx[i][ihs[2]]
label[i] = 2
#print "grids_sub_ndx:",grids_sub_ndx[i]
properties = {'E':[], 'Fx':[], 'Fy':[], 'Fz':[], 'Tx':[], 'Ty':[], 'Tz':[]}
propnames = ['E','Fx','Fy','Fz','Tx','Ty','Tz']
tempprop = deepcopy(properties)
for i in ri_ndxs:
for j in grid_ndx_layer:
prop = deepcopy(tempprop)
for ni in grids_sub_ndx[i]:
inpfiles = []
for k in range(DS[self.FT].nNorm[i]):
inpfile = 'r%3.2f/tempconf_d%3.2f_g%03d_c%02d.inp'%(DS[self.FT].R_NDX[i],DS[self.FT].R_NDX[i],j,ni+k*DS[self.FT].nConf[i])
inpfiles.append(inpfile)
xvecs = []
for ff in range(len(inpfiles)):
xconf = allconfigs.allcfg[i][j][ni][ff].xmole2
xvecs.append( norm_prob(xconf,[0,1,2],'wtr') )
nvec = len(xvecs)
if nvec == 2: # linear interpolation for normal vectors
w0, w1, ndx0, ndx1 = weights_for_normal_general( nrmv, xvecs)
#print 'test',i, j, ni, ndx0, ndx1
for pp in propnames:
p0 = allconfigs.get_prop(i,j,ni,ndx0,pp,w0, ehigh=ehigh)
p1 = allconfigs.get_prop(i,j,ni,ndx1,pp,w1, ehigh=ehigh)
p = p1*abs(w1) + p0*abs(w0)
prop[pp].append(p)
#print pp, inpfiles[ndx0],p0,w0,inpfiles[ndx1],p1,w1,p
elif nvec > 2: # quadratic interpolation for normal vectors
angNorm, ndx1, ndx2, ndx3 = get_neighors_for_normal(nrmv, xvecs)
angNorm_1 = ndx1*np.pi/nvec
angNorm_2 = ndx2*np.pi/nvec
angNorm_3 = ndx3*np.pi/nvec
#print "lagrange", i, j, ni, ndx1, ndx2, ndx3, angNorm*R2D, angNorm_1*R2D, angNorm_2*R2D, angNorm_3*R2D
for pp in propnames:
if ndx1 == nvec: ndx1 = 0
if ndx2 == nvec: ndx2 = 0
if ndx3 == nvec: ndx3 = 0
p1 = allconfigs.get_prop(i,j,ni,ndx1,pp,0, ehigh=ehigh)
p2 = allconfigs.get_prop(i,j,ni,ndx2,pp,0, ehigh=ehigh)
p3 = allconfigs.get_prop(i,j,ni,ndx3,pp,0, ehigh=ehigh)
points = [(angNorm_1,p1),(angNorm_2,p2),(angNorm_3,p3)]
p = lagrange_interp(points,angNorm)
prop[pp].append(p)
#print pp, inpfiles[ndx1],p1,inpfiles[ndx2],p2,inpfiles[ndx3],p3,p
for pp in propnames:
# on the level of orientation, theta and phi
if len(prop[pp]) == 4:
psub = bilinear_gen(prop[pp][0], prop[pp][1], prop[pp][2], prop[pp][3], wghx1[i], wghx2[i], wghy[i],label[i])
properties[pp].append(psub)
#print pp, prop[pp][0], prop[pp][1], prop[pp][2], prop[pp][3], grids_sub_ndx[i], wghx1[i], wghx2[i], wghy[i],psub
elif len(prop[pp]) == 9:
cn = 0
points_phi = []
for kk in dtheta_ndx[i]:
#print "here",kk, self.OrientDS[i]['wtr'].nPhi
angPhi = self.OrientDS[i]['wtr'].PHI_angles[kk]
#print "for orientation with phi=",angPhi*R2D
if len(set(dtheta_ndx[i][kk])) == 1: # vertex
p = prop[pp][cn]
points_phi.append((angPhi,p))
cn += 3
continue
points_theta = []
for ip in dtheta_ndx[i][kk]:
if ip >= self.OrientDS[i]['wtr'].NTheta[kk]:
angTheta = 2*np.pi + self.OrientDS[i]['wtr'].THETA_angles[kk][ip-self.OrientDS[i]['wtr'].NTheta[kk]]
elif ip < 0:
angTheta = self.OrientDS[i]['wtr'].THETA_angles[kk][ip] - 2*np.pi
else:
angTheta = self.OrientDS[i]['wtr'].THETA_angles[kk][ip]
points_theta.append((angTheta,prop[pp][cn]))
#print pp, angTheta*R2D, prop[pp][cn]
cn += 1
p = lagrange_interp(points_theta,ang2)
#print 'quadratic interpolation gives',p, 'for property', pp
points_phi.append((angPhi,p))
psub = lagrange_interp(points_phi,ang1)
#print 'interpolated orientational property of %s:'%pp,psub
properties[pp].append(psub)
## on the level of r, theta, phi
self.properties = {}
        if len(dtheta_ndx_layer) == 2: # for grids near the vertex of each layer: linear interpolation for grids and quadratic interpolation for layers; NOT IN THIS SCRIPT
pass
#Wghx = []
#For kk in dtheta_ndx_layer:
# ip1 = dtheta_ndx_layer[kk][0]
# ip2 = dtheta_ndx_layer[kk][1]
# if ip1 == 0 and ip2 == 0:
# wtmp = 0
# else:
# wtmp = (self.ang2-DS[self.FT].THETA_angles[kk][ip1])/(DS[self.FT].THETA_angles[kk][ip2]-DS[self.FT].THETA_angles[kk][ip1])
# wghx.append(wtmp)
#Ihs = dtheta_ndx_layer.keys()
#Wghy = (self.ang1-DS[self.FT].PHI_angles[ihs[0]])/(DS[self.FT].PHI_angles[ihs[1]]-DS[self.FT].PHI_angles[ihs[0]])
#For pp in propnames:
# psub_r = []
# for m in range(0,len(properties[pp]),4): # for each layer
# #print pp, properties[pp][m], properties[pp][m+1],properties[pp][m+2], properties[pp][m+3], wghx[0], wghx[1], wghy
# psub = bilinear_gen(properties[pp][m], properties[pp][m+1],properties[pp][m+2], properties[pp][m+3], wghx[0], wghx[1], wghy,1)
# psub_r.append(psub)
# if not len(psub_r) == 3:
# #print 'quadratic interpolation needs 3 layers'
# sys.exit()
# points = []
# for t in range(len(ri_ndxs)):
# dist = DS[self.FT].R_NDX[ri_ndxs[t]]
# points.append((dist,psub_r[t]))
# p = lagrange_interp(points,self.r)
# self.properties[pp] = p
elif len(dtheta_ndx_layer) == 3: # quadratic interpolation for layers and grids
for pp in propnames:
psub_r = []
for m in range(0,len(properties[pp]),9): # for each layer
count = 0
points_th = []
for kk in dtheta_ndx_layer:
if len(set(dtheta_ndx_layer[kk])) == 1: # vertex
p = properties[pp][m+count]
points_th.append((DS[self.FT].PHI_angles[kk],p))
count += 3
continue
ip1 = dtheta_ndx_layer[kk][0]
ip2 = dtheta_ndx_layer[kk][1]
ip3 = dtheta_ndx_layer[kk][2]
th1 = DS[self.FT].THETA_angles[kk][ip1]
th2 = DS[self.FT].THETA_angles[kk][ip2]
th3 = DS[self.FT].THETA_angles[kk][ip3]
points = [(th1,properties[pp][m+count]),(th2,properties[pp][m+count+1]),(th3,properties[pp][m+count+2])]
p = lagrange_interp(points,self.ang2)
points_th.append((DS[self.FT].PHI_angles[kk],p))
count += 3
p = lagrange_interp(points_th,self.ang1)
psub_r.append(p)
if not len(psub_r) == 3:
#print 'quadratic interpolation needs 3 layers'
sys.exit()
points = []
for t in range(len(ri_ndxs)):
dist = DS[self.FT].R_NDX[ri_ndxs[t]]
points.append((dist,psub_r[t]))
p = lagrange_interp(points,self.r)
self.properties[pp] = p
def reverse_force_toque(self):
Fx = self.properties['Fx']
Fy = self.properties['Fy']
Fz = self.properties['Fz']
self.force = [Fx, Fy, Fz]
Tx = self.properties['Tx']
Ty = self.properties['Ty']
Tz = self.properties['Tz']
self.torque = [Tx, Ty, Tz]
if self.exit_before:
return
self.MirrorBackProperty()
self.ReorientToOldVec()
def get_interp_energy(self):
return self.properties['E']
def get_interp_force(self):
return self.force
def get_interp_torque(self):
return self.torque<|fim▁end|> | if abs(ang-np.pi)<cut: ax = [1,0,0]
else: ax = get_normal(vec, ax0)
for i in range(self.natoms): |
<|file_name|>CreativeTabBCT.java<|end_file_name|><|fim▁begin|>package com.calebmeyer.bettercrafting.creativetab;
import com.calebmeyer.bettercrafting.constants.Project;
import com.calebmeyer.bettercrafting.initialization.ModItems;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.item.Item;
public class CreativeTabBCT {
public static final CreativeTabs BETTER_CRAFTING_TABLES_TAB = new CreativeTabs(Project.MOD_ID) {
/**<|fim▁hole|> * This method returns an item, whose icon is used for this creative tab
*
* @return an item to use for the creative tab's icon
*/
@Override
public Item getTabIconItem() {
return ModItems.craftingPanel;
}
/**
* Gets the label for this creative tab.
*
* @return the label
*/
@Override
public String getTranslatedTabLabel() {
return Project.MOD_NAME;
}
};
}<|fim▁end|> | |
<|file_name|>EntityRef.java<|end_file_name|><|fim▁begin|>//////////////////////////////////////////////////
// JIST (Java In Simulation Time) Project
// Timestamp: <EntityRef.java Sun 2005/03/13 11:10:16 barr rimbase.rimonbarr.com>
//
// Copyright (C) 2004 by Cornell University
// All rights reserved.
// Refer to LICENSE for terms and conditions of use.
package jist.runtime;
import java.lang.reflect.Method;
import java.lang.reflect.InvocationHandler;
import java.rmi.RemoteException;
/**
* Stores a reference to a (possibly remote) Entity object. A reference
* consists of a serialized reference to a Controller and an index within that
* Controller.
*
* @author Rimon Barr <[email protected]>
* @version $Id: EntityRef.java,v 1.1 2007/04/09 18:49:26 drchoffnes Exp $
* @since JIST1.0
*/
public class EntityRef implements InvocationHandler
{
/**
* NULL reference constant.
*/
public static final EntityRef NULL = new EntityRefDist(null, -1);
/**
* Entity index within Controller.
*/
private final int index;
/**
* Initialise a new entity reference with given<|fim▁hole|> * @param index entity ID
*/
public EntityRef(int index)
{
this.index = index;
}
/**
* Return entity reference hashcode.
*
* @return entity reference hashcode
*/
public int hashCode()
{
return index;
}
/**
* Test object equality.
*
* @param o object to test equality
* @return object equality
*/
public boolean equals(Object o)
{
if(o==null) return false;
if(!(o instanceof EntityRef)) return false;
EntityRef er = (EntityRef)o;
if(index!=er.index) return false;
return true;
}
/**
* Return controller of referenced entity.
*
* @return controller of referenced entity
*/
public ControllerRemote getController()
{
if(Main.SINGLE_CONTROLLER)
{
return Controller.activeController;
}
else
{
throw new RuntimeException("multiple controllers");
}
}
/**
* Return index of referenced entity.
*
* @return index of referenced entity
*/
public int getIndex()
{
return index;
}
/**
* Return toString of referenced entity.
*
* @return toString of referenced entity
*/
public String toString()
{
try
{
return "EntityRef:"+getController().toStringEntity(getIndex());
}
catch(java.rmi.RemoteException e)
{
throw new RuntimeException(e);
}
}
/**
* Return class of referenced entity.
*
* @return class of referenced entity
*/
public Class getClassRef()
{
try
{
return getController().getEntityClass(getIndex());
}
catch(java.rmi.RemoteException e)
{
throw new RuntimeException(e);
}
}
//////////////////////////////////////////////////
// proxy entities
//
/** boolean type for null return. */
private static final Boolean RET_BOOLEAN = new Boolean(false);
/** byte type for null return. */
private static final Byte RET_BYTE = new Byte((byte)0);
/** char type for null return. */
private static final Character RET_CHARACTER = new Character((char)0);
/** double type for null return. */
private static final Double RET_DOUBLE = new Double((double)0);
/** float type for null return. */
private static final Float RET_FLOAT = new Float((float)0);
/** int type for null return. */
private static final Integer RET_INTEGER = new Integer(0);
/** long type for null return. */
private static final Long RET_LONG = new Long(0);
/** short type for null return. */
private static final Short RET_SHORT = new Short((short)0);
/**
* Called whenever a proxy entity reference is invoked. Schedules the call
* at the appropriate Controller.
*
* @param proxy proxy entity reference object whose method was invoked
* @param method method invoked on entity reference object
* @param args arguments of the method invocation
* @return result of blocking event; null return for non-blocking events
* @throws Throwable whatever was thrown by blocking events; never for non-blocking events
*/
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable
{
try
{
if(Rewriter.isBlockingRuntimeProxy(method))
// todo: make Object methods blocking
//|| method.getDeclaringClass()==Object.class)
{
return blockingInvoke(proxy, method, args);
}
else
{
// schedule a simulation event
if(Main.SINGLE_CONTROLLER)
{
Controller.activeController.addEvent(method, this, args);
}
else
{
getController().addEvent(method, this, args);
}
return null;
}
}
catch(RemoteException e)
{
throw new JistException("distributed simulation failure", e);
}
}
/**
* Helper method: called whenever a BLOCKING method on proxy entity reference
* is invoked. Schedules the call at the appropriate Controller.
*
* @param proxy proxy entity reference object whose method was invoked
* @param method method invoked on entity reference object
* @param args arguments of the method invocation
* @return result of blocking event
* @throws Throwable whatever was thrown by blocking events
*/
private Object blockingInvoke(Object proxy, Method method, Object[] args) throws Throwable
{
Controller c = Controller.getActiveController();
if(c.isModeRestoreInst())
{
// restore complete
if(Controller.log.isDebugEnabled())
{
Controller.log.debug("restored event state!");
}
// return callback result
return c.clearRestoreState();
}
else
{
// calling blocking method
c.registerCallEvent(method, this, args);
// todo: darn Java; this junk slows down proxies
Class ret = method.getReturnType();
if(ret==Void.TYPE)
{
return null;
}
else if(ret.isPrimitive())
{
String retName = ret.getName();
switch(retName.charAt(0))
{
case 'b':
switch(retName.charAt(1))
{
case 'o': return RET_BOOLEAN;
case 'y': return RET_BYTE;
default: throw new RuntimeException("unknown return type");
}
case 'c': return RET_CHARACTER;
case 'd': return RET_DOUBLE;
case 'f': return RET_FLOAT;
case 'i': return RET_INTEGER;
case 'l': return RET_LONG;
case 's': return RET_SHORT;
default: throw new RuntimeException("unknown return type");
}
}
else
{
return null;
}
}
}
} // class: EntityRef<|fim▁end|> | * Controller and Entity IDs.
* |
<|file_name|>api.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from .httpclient import HTTPClient
from .models import Video, Show
__all__ = ['Funimation']
class Funimation(object):
def __init__(self):
super(Funimation, self).__init__()
self.http = HTTPClient('http://www.funimation.com/',
[('User-Agent', 'Sony-PS3')])
# defaults to the free account user
        # hmm... the API doesn't appear to validate the user's subscription
# level so if this was changed you might be able to watch
# the paid videos ;)
# FunimationSubscriptionUser = paid account
# FunimationUser = free account
self.user_type = 'FunimationSubscriptionUser'<|fim▁hole|> def get_shows(self, limit=3000, offset=0, sort=None, first_letter=None,
filter=None):
query = self._build_query(locals())
return self._request('feeds/ps/shows', query)
def get_videos(self, show_id, limit=3000, offset=0):
query = self._build_query(locals())
request = self._request('feeds/ps/videos', query)
for req in request:
# Replace get params with the mobile one
# This lets any IP (not only server IP) access content
req.video_url = req.video_url.split('?')[0]+'?9b303b6c62204a9dcb5ce5f5c607'
video_split = req.video_url.split(',')
split_len = len(video_split)
req.video_url = video_split[0]+video_split[split_len-2]+video_split[split_len-1]
return request
def get_featured(self, limit=3000, offset=0):
query = self._build_query(locals())
return self._request('feeds/ps/featured', query)
def search(self, search):
query = self._build_query(locals())
return self._request('feeds/ps/search', query)
def get_latest(self, limit=3000, offset=0):
if self.user_type == 'FunimationSubscriptionUser':
sort = 'SortOptionLatestSubscription'
else:
sort = 'SortOptionLatestFree'
return self.get_shows(limit, offset, sort)
def get_simulcast(self, limit=3000, offset=0):
return self.get_shows(limit, offset, filter='FilterOptionSimulcast')
def get_genres(self):
# we have to loop over all the shows to be sure to get all the genres.
# use a 'set' so duplicates are ignored.
genres = set()
for show in self.get_shows():
if show.get('genres'):
                genres.update(show.get('genres').split(','))
return sorted(genres)
def get_shows_by_genre(self, genre):
shows = []
for show in self.get_shows():
if show.get('genres') and genre in show.get('genres').split(','):
shows.append(show)
return shows
def _request(self, uri, query):
res = self.http.get(uri, query)
if 'videos' in res:
return [Video(**v) for v in res['videos']]
elif isinstance(res, list) and 'series_name' in res[0]:
return [Show(**s) for s in res]
else:
# search results
new_res = {}
            # the result is a list when there are no episodes in the results...
if isinstance(res['episodes'], list):
new_res['episodes'] = []
else:
new_res['episodes'] = [Video(**v) for v in
res['episodes']['videos']]
new_res['shows'] = [Show(**s) for s in res['shows']]
return new_res
def _build_query(self, params):
if params is None:
params = {}
else:
params['first-letter'] = params.pop('first_letter', None)
params.pop('self', None)
params.setdefault('ut', self.user_type)
return params<|fim▁end|> | |
<|file_name|>light_controller.py<|end_file_name|><|fim▁begin|>import dmx
import socket
wash = dmx.EuroliteMovingHeadWash(base=1, color=(1, 1, 1), intensity=1)
controller = dmx.DMXController(debug=True, fixtures=[wash], port='/dev/tty.usbmodem1421')
controller.enabled = True
server = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
server.bind(('localhost', 9000))
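# Line-oriented UDP protocol (message shapes assumed from the handlers below):
#   color ff8800   -> set RGB from a hex triplet
#   tilt 45        -> set the tilt angle
#   pan 120        -> set the pan angle
# e.g. from a shell: echo "color ff0000" | nc -u -w1 localhost 9000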
while True:
message = server.recv(1000)
command, argument = message.strip().split(' ', 1)
if command == 'color':
rgb = int(argument, 16)<|fim▁hole|> wash.pan = int(argument)<|fim▁end|> | wash.color = (rgb >> 16) / 255., ((rgb >> 8) & 0xff) / 255., (rgb & 0xff) / 255.
elif command == 'tilt':
wash.tilt = int(argument)
elif command == 'pan': |
<|file_name|>tcp_message.py<|end_file_name|><|fim▁begin|>"""
tcp_message Inline Script Hook API Demonstration
------------------------------------------------
* modifies packets containing "foo" to "bar"
* prints various details for each packet.
example cmdline invocation:
mitmdump -T --host --tcp ".*" -q -s examples/tcp_message.py
"""<|fim▁hole|> modified_msg = tcp_msg.message.replace("foo", "bar")
is_modified = False if modified_msg == tcp_msg.message else True
tcp_msg.message = modified_msg
print(
"[tcp_message{}] from {} {} to {} {}:\r\n{}".format(
" (modified)" if is_modified else "",
"client" if tcp_msg.sender == tcp_msg.client_conn else "server",
tcp_msg.sender.address,
"server" if tcp_msg.receiver == tcp_msg.server_conn else "client",
tcp_msg.receiver.address, strutils.bytes_to_escaped_str(tcp_msg.message))
)<|fim▁end|> | from mitmproxy.utils import strutils
def tcp_message(tcp_msg): |
<|file_name|>nodejs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2016 DIANA-HEP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.<|fim▁hole|>import os
def write(vegaSpec, outputFile, format=None):
"""Use the 'vega' package in Nodejs to write to SVG or PNG files.
Unlike interactive plotting, this does not require a round trip through a web browser, but it does require a
Nodejs installation on your computer (to evaluate the Javascript).
To install the prerequisites on an Ubuntu system, do
# Cairo dependencies for generating PNG:
        sudo apt-get install libcairo2-dev libjpeg-dev libgif-dev libpango1.0-dev build-essential g++
# Nodejs and its package manager, npm:
sudo apt-get install npm
# Get the 'vega' package with npm; user-install, not global (no sudo)!
npm install vega
Parameters:
vegaSpec (string or dict): JSON string or its dict-of-dicts equivalent
outputFile (string or None): output file name or None to return output as a string
format ('svg', 'png', or None): None (default) guesses format from outputFile extension
"""
if format is None and outputFile is None:
format = "svg"
elif format is None and outputFile.endswith(".svg"):
format = "svg"
elif format is None and outputFile.endswith(".png"):
format = "png"
    elif format is None:
        raise IOError("Could not infer format from outputFile")
if format == "png":
cmd = "vg2png"
elif format == "svg":
cmd = "vg2svg"
else:
raise IOError("Only 'png' and 'svg' output is supported.")
npmbin = subprocess.Popen(["npm", "bin"], stdout=subprocess.PIPE)
if npmbin.wait() == 0:
npmbin = npmbin.stdout.read().strip()
else:
raise IOError("Nodejs Package Manager 'npm' must be installed to use nodejs.write function.")
tmp = tempfile.NamedTemporaryFile(delete=False)
if isinstance(vegaSpec, dict):
        json.dump(vegaSpec, tmp)
else:
tmp.write(vegaSpec)
tmp.close()
if outputFile is None:
vg2x = subprocess.Popen([cmd, tmp.name], stdout=subprocess.PIPE, env=dict(
os.environ, PATH=npmbin + ":" + os.environ.get("PATH", "")))
if vg2x.wait() == 0:
return vg2x.stdout.read()
else:
os.unlink(tmp.name)
raise IOError("Command '{0}' failed; if it's not installed, install it with 'npm install vega'".format(cmd))
else:
vg2x = subprocess.Popen([cmd, tmp.name, outputFile], stdout=subprocess.PIPE,
env=dict(os.environ, PATH=npmbin + ":" + os.environ.get("PATH", "")))
if vg2x.wait() != 0:
os.unlink(tmp.name)
raise IOError("Command '{0}' failed; if it's not installed, install it with 'npm install vega'".format(cmd))<|fim▁end|> |
import json
import subprocess
import tempfile |
<|file_name|>APIMaker.ts<|end_file_name|><|fim▁begin|>import { isNumber } from "util";
import axios, { AxiosRequestConfig } from 'axios';
export interface SMethod {
Name: string
fnName: string
QueryParameters: any[]
PathParameters: any[]
HeaderParameters: any[]
RequiredArgs: number
TotalArgs: number
Verb:string,
Keep:boolean,
RelativeRoute
}
export interface SNode {
FormatName: string
Class: SNode
Properties: SNode[]
Methods: SMethod[]
Route: string
PathParameter: string[]
}
export interface IAPICall {
verb:string
route:string
query?:string
body?:any
headers:any
}
export type APIPreSend = (data:IAPICall)=>Promise<any>;
export class APIService {<|fim▁hole|> private preSend:APIPreSend
fnHandler (route:string, method:SMethod, personalKey:string, ...args:any[]) {
args = this.validateArgs(method.Name, args, method.RequiredArgs, method.TotalArgs);
route = this.getRoute(route, method, args);
let query = this.getQuery(method.Verb, method.QueryParameters, args);
args.splice(0,0,this.personalKey);
let Headers = this.getHeaders(method.Verb, method.HeaderParameters, args);
let Body = args[0];
let retObj:IAPICall = {
headers: Headers,
verb:method.Verb,
route,
};
if(query)retObj.query = query;
if(Body)retObj.body = Body;
return this.preSend(retObj);
}
send(data:IAPICall) {
let url = data.route + (data.query ? data.query : '');
let headers = {
Authorization: data.headers.Authorization
};
if ( data.headers['X-Filter'] ) {
headers['X-Filter'] = JSON.stringify(data.headers['X-Filter']);
}
let config:AxiosRequestConfig = Object.assign({}, this.axios_config, {
method: data.verb,
url,
headers,
data: data.body
});
return axios(config).then(res => res.data).catch(err => err.response.data );
}
constructor(private server:string, private personalKey:string, API:SNode, preSend?:APIPreSend, private axios_config:AxiosRequestConfig = {}) {
this.personalKey = `Bearer ${this.personalKey}`;
this.preSend = preSend || this.send;
return this.toFinal(API, server);
}
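    // Example wiring (hypothetical schema tree and token; the constructor
    // returns the generated API tree, hence the `any` typing):
    //   const api: any = new APIService('https://host/v1', token, tree);
    //   api.users('42').list(1, 20, { active: true }).then(console.log);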
checkListArgs (args:any[]) {
let [page, page_size, filter] = args;
if( typeof(page) === 'object' ) {
filter = page;
page = undefined;
} else if(typeof(page_size) === 'object') {
filter = page_size;
page_size = undefined;
}
let error = !(page === undefined || isNumber(page)) || !(page_size === undefined || isNumber(page_size)) || !(filter === undefined || filter instanceof Object);
if(error) {
throw new Error('Arguments are not valid');
}
return [ page, page_size, filter ];
}
validateArgs(fnName:string, args:any[], required:number, total:number) {
if(fnName === 'list') {
return this.checkListArgs(args)
} else {
let argCount = args.length;
let argsCountMatch = argCount >= required && argCount <= total;
if( !argsCountMatch ) {
throw new Error('Argument count does not match')
}
return args;
}
}
getQuery (method:string, QueryParameters:string[], args:any[]) {
let query;
if ( method === 'get') {
query = QueryParameters.map( (parameter,i) => args[i] ? `${parameter}=${args[i]}`:'').filter(f=>f).join('&');
query = query ? `?${query}` : '';
args.splice(0, QueryParameters.length);
}
return query;
}
getHeaders (method:string, HeaderParameters:string[], args:any[]) {
let Headers = {};
for(const parameter of HeaderParameters) {
if(args[0]) {
Headers[parameter] = args[0];
args.splice(0, 1);
}
}
return Headers;
}
getRoute(route:string, method:SMethod, args:any) {
if(method.Keep) {
route += `/${method.Name}`;
}
for(let i=0; method.PathParameters[i]; i++) {
let {name} = method.PathParameters[i];
route = route.replace(`{${name}}`, args[0]);
args.splice(0,1);
}
return route;
}
toFinal(node:SNode, route:string) {
let api:any = {};
if ( node.Class ) {
api = (arg:any) => {
let _route = route + node.Class.Route;
for(const parameter of node.Class.PathParameter) {
_route = _route.replace(`{${parameter}}`, arg);
}
return this.toFinal.bind(this)(node.Class, _route);
}
}
for(const property of node.Properties) {
api[property.FormatName] = this.toFinal.bind(this)(property, route+property.Route);
}
for(const method of node.Methods) {
api[ method.fnName ] = (...args:any[]) => {
let _route = route;
if(method.PathParameters.length) {
_route += method.RelativeRoute;
}
return this.fnHandler(_route, method, this.personalKey, ...args)
};
}
return api;
}
}<|fim▁end|> | |
<|file_name|>hr.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
CKEDITOR.plugins.setLang( 'colorbutton', 'hr', {
auto: 'Automatski',
bgColorTitle: 'Boja pozadine',
colors: {
'000': 'Crna',
'800000': 'Kesten',
'8B4513': 'Smeđa',
'2F4F4F': 'Tamno siva',
'008080': 'Teal',
'000080': 'Mornarska',
'4B0082': 'Indigo',
'696969': 'Tamno siva',
B22222: 'Vatrena cigla',
A52A2A: 'Smeđa',
DAA520: 'Zlatna',
'006400': 'Tamno zelena',
'40E0D0': 'Tirkizna',
'0000CD': 'Srednje plava',
'800080': 'Ljubičasta',
'808080': 'Siva',
F00: 'Crvena',
FF8C00: 'Tamno naranđasta',
FFD700: 'Zlatna',
'008000': 'Zelena',
'0FF': 'Cijan',
'00F': 'Plava',
EE82EE: 'Ljubičasta',
A9A9A9: 'Mutno siva',
FFA07A: 'Svijetli losos',
FFA500: 'Naranđasto',
FFFF00: 'Žuto',
'00FF00': 'Limun',
AFEEEE: 'Blijedo tirkizna',
ADD8E6: 'Svijetlo plava',
DDA0DD: 'Šljiva',
D3D3D3: 'Svijetlo siva',
FFF0F5: 'Lavanda rumeno',
FAEBD7: 'Antikno bijela',
FFFFE0: 'Svijetlo žuta',
F0FFF0: 'Med',
F0FFFF: 'Azurna',
F0F8FF: 'Alice plava',
E6E6FA: 'Lavanda',
FFF: 'Bijela',
'1ABC9C': 'Jaka cijan',
'2ECC71': 'Emerald',
'3498DB': 'Svijetlo plava',
'9B59B6': 'Ametist',
'4E5F70': 'Sivkasto plava',
'F1C40F': 'Žarka žuta',
'16A085': 'Tamna cijan',
'27AE60': 'Tamna emerald',
'2980B9': 'Jaka plava',
'8E44AD': 'Tamno ljubičasta',
'2C3E50': 'Desatuirarana plava',<|fim▁hole|> 'E74C3C': 'Blijedo crvena',
'ECF0F1': 'Sjana srebrna',
'95A5A6': 'Svijetlo sivkasta cijan',
'DDD': 'Svijetlo siva',
'D35400': 'Tikva',
'C0392B': 'Jaka crvena',
'BDC3C7': 'Srebrna',
'7F8C8D': 'Sivkasto cijan',
'999': 'Tamno siva'
},
more: 'Više boja...',
panelTitle: 'Boje',
textColorTitle: 'Boja teksta'
} );<|fim▁end|> | 'F39C12': 'Narančasta',
'E67E22': 'Mrkva', |
<|file_name|>AmendmentRequests.js<|end_file_name|><|fim▁begin|>/**
GaiaEHR (Electronic Health Records)
Copyright (C) 2013 Certun, LLC.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
Ext.define('App.store.patient.AmendmentRequests', {
extend: 'Ext.data.Store',
model: 'App.model.patient.AmendmentRequest'<|fim▁hole|>});<|fim▁end|> | |
<|file_name|>suffix.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Directive} from '@angular/core';
<|fim▁hole|>})
export class MatSuffix {}<|fim▁end|> |
/** Suffix to be placed at the end of the form field. */
@Directive({
selector: '[matSuffix]', |
<|file_name|>authentication.js<|end_file_name|><|fim▁begin|>const express = require('express');
const router = express.Router();
const bodyParser = require('body-parser');
const { validateSignInForm, isLoggedIn } = require('../middlewares/validation');
const { signOutUser } = require('../../models/helper-functions');
const user = require('../../models/users');
const reviews = require('../../models/reviews');
const urlEncodedParser = bodyParser.urlencoded({ extended: false });
router.get('/sign-up', (req, res) => {<|fim▁hole|>
router.route('/sign-in')
.get((req, res) => {
res.render('sign-in', { error: false });
})
.post(urlEncodedParser, validateSignInForm, (req, res, next) => {
const credentials = req.body;
user.loginByEmail(credentials, req)
.then((user) => {
res.redirect(`users/${user.id}`);
})
.catch((error) => {
                console.log('An error occurred while logging in user:', error);
next(new Error('incorrect email and/or password'));
})
});
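// Example request against the sign-in route above (field names assumed from the form):
//   curl -X POST --data "email=user@example.com&password=secret" http://localhost:3000/sign-in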
router.get('/sign-out', (req, res) => {
signOutUser(req);
res.redirect('/?isloggedIn=false');
})
module.exports = router;<|fim▁end|> | res.render('sign-up', { error: false });
}); |
<|file_name|>edwards_fah_2013a.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# The Hazard Library
# Copyright (C) 2013-2014, GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Module exports
:class:`EdwardsFah2013Alpine10Bars`,
:class:`EdwardsFah2013Alpine20Bars`,
:class:`EdwardsFah2013Alpine30Bars`,
:class:`EdwardsFah2013Alpine50Bars`,
:class:`EdwardsFah2013Alpine60Bars`,
:class:`EdwardsFah2013Alpine75Bars`,
:class:`EdwardsFah2013Alpine90Bars`,
:class:`EdwardsFah2013Alpine120Bars`.
"""
from __future__ import division
import numpy as np
from scipy.constants import g
from openquake.hazardlib.gsim.base import GMPE
from openquake.hazardlib import const
from openquake.hazardlib.imt import PGV, PGA, SA<|fim▁hole|> COEFFS_ALPINE_20Bars,
COEFFS_ALPINE_30Bars,
COEFFS_ALPINE_50Bars,
COEFFS_ALPINE_75Bars,
COEFFS_ALPINE_90Bars,
COEFFS_ALPINE_120Bars
)
from openquake.hazardlib.gsim.utils_swiss_gmpe import (
_compute_phi_ss,
_compute_C1_term
)
class EdwardsFah2013Alpine10Bars(GMPE):
"""
    This function implements the GMPE developed by Ben Edwards and Donat Fäh
and published as "A Stochastic Ground-Motion Model for Switzerland"
Bulletin of the Seismological Society of America,
Vol. 103, No. 1, pp. 78–98, February 2013.
The GMPE was parametrized by Carlo Cauzzi to be implemented in OpenQuake.
    This class implements the equations for 'Alpine' and 'Foreland' - two
    tectonic regionalizations defined for Switzerland -
    therefore this GMPE is region specific.
@ implemented by [email protected]
"""
#: Supported tectonic region type is ALPINE which
#: is a sub-region of Active Shallow Crust.
DEFINED_FOR_TECTONIC_REGION_TYPE = const.TRT.ACTIVE_SHALLOW_CRUST
#: Supported intensity measure types are spectral acceleration,
#: and peak ground acceleration, see tables 3 and 4, pages 227 and 228.
DEFINED_FOR_INTENSITY_MEASURE_TYPES = set([
PGV,
PGA,
SA
])
#: Supported intensity measure component is the geometric mean of two
#: horizontal components
#: :attr:`~openquake.hazardlib.const.IMC.AVERAGE_HORIZONTAL`
DEFINED_FOR_INTENSITY_MEASURE_COMPONENT = const.IMC.AVERAGE_HORIZONTAL
#: Supported standard deviation type is total,
#: Carlo Cauzzi - Personal Communication
DEFINED_FOR_STANDARD_DEVIATION_TYPES = set([
const.StdDev.TOTAL
])
#: Required site parameter is only Vs30 (used to distinguish rock
#: and deep soil).
REQUIRES_SITES_PARAMETERS = set(('vs30', ))
#: Required rupture parameters: magnitude
REQUIRES_RUPTURE_PARAMETERS = set(('mag', 'rake'))
#: Required distance measure is Rrup
REQUIRES_DISTANCES = set(('rrup', ))
#: Vs30 value representing typical rock conditions in Switzerland.
#: confirmed by the Swiss GMPE group
ROCK_VS30 = 1105
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
COEFFS = self.COEFFS[imt]
R = self._compute_term_r(COEFFS, rup.mag, dists.rrup)
mean = 10 ** (self._compute_mean(COEFFS, rup.mag, R))
# Convert units to g,
# but only for PGA and SA (not PGV):
if isinstance(imt, (PGA, SA)):
mean = np.log(mean / (g*100.))
else:
# PGV:
mean = np.log(mean)
c1_rrup = _compute_C1_term(COEFFS, dists.rrup)
log_phi_ss = 1.00
stddevs = self._get_stddevs(
COEFFS, stddev_types, sites.vs30.shape[0], rup.mag, c1_rrup,
log_phi_ss, COEFFS['mean_phi_ss']
)
return mean, stddevs
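    # Sketch of a direct call (normally the OpenQuake engine builds the
    # sites/rup/dists context objects and picks the IMT itself):
    #   gsim = EdwardsFah2013Alpine10Bars()
    #   mean, [sigma] = gsim.get_mean_and_stddevs(sites, rup, dists,
    #                                             PGA(), [const.StdDev.TOTAL])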
def _get_stddevs(self, C, stddev_types, num_sites, mag, c1_rrup,
log_phi_ss, mean_phi_ss):
"""
Return standard deviations
"""
phi_ss = _compute_phi_ss(C, mag, c1_rrup, log_phi_ss, mean_phi_ss)
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt(
C['tau'] * C['tau'] +
phi_ss * phi_ss) +
np.zeros(num_sites))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi_ss + np.zeros(num_sites))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(C['tau'] + np.zeros(num_sites))
return stddevs
def _compute_term_r(self, C, mag, rrup):
"""
Compute distance term
d = log10(max(R,rmin));
"""
if mag > self.M1:
rrup_min = 0.55
elif mag > self.M2:
rrup_min = -2.80 * mag + 14.55
else:
rrup_min = -0.295 * mag + 2.65
R = np.maximum(rrup, rrup_min)
return np.log10(R)
def _compute_term_1(self, C, mag):
"""
Compute term 1
a1 + a2.*M + a3.*M.^2 + a4.*M.^3 + a5.*M.^4 + a6.*M.^5 + a7.*M.^6
"""
return (
C['a1'] + C['a2'] * mag + C['a3'] *
np.power(mag, 2) + C['a4'] * np.power(mag, 3)
+ C['a5'] * np.power(mag, 4) + C['a6'] *
np.power(mag, 5) + C['a7'] * np.power(mag, 6)
)
def _compute_term_2(self, C, mag, R):
"""
(a8 + a9.*M + a10.*M.*M + a11.*M.*M.*M).*d(r)
"""
return (
(C['a8'] + C['a9'] * mag + C['a10'] * np.power(mag, 2) +
C['a11'] * np.power(mag, 3)) * R
)
def _compute_term_3(self, C, mag, R):
"""
(a12 + a13.*M + a14.*M.*M + a15.*M.*M.*M).*(d(r).^2)
"""
return (
(C['a12'] + C['a13'] * mag + C['a14'] * np.power(mag, 2) +
C['a15'] * np.power(mag, 3)) * np.power(R, 2)
)
def _compute_term_4(self, C, mag, R):
"""
(a16 + a17.*M + a18.*M.*M + a19.*M.*M.*M).*(d(r).^3)
"""
return (
(C['a16'] + C['a17'] * mag + C['a18'] * np.power(mag, 2) +
C['a19'] * np.power(mag, 3)) * np.power(R, 3)
)
def _compute_term_5(self, C, mag, R):
"""
(a20 + a21.*M + a22.*M.*M + a23.*M.*M.*M).*(d(r).^4)
"""
return (
(C['a20'] + C['a21'] * mag + C['a22'] * np.power(mag, 2) +
C['a23'] * np.power(mag, 3)) * np.power(R, 4)
)
def _compute_mean(self, C, mag, term_dist_r):
"""
compute mean
"""
return (self._compute_term_1(C, mag) +
self._compute_term_2(C, mag, term_dist_r) +
self._compute_term_3(C, mag, term_dist_r) +
self._compute_term_4(C, mag, term_dist_r) +
self._compute_term_5(C, mag, term_dist_r))
#: Fixed magnitude terms
M1 = 5.00
M2 = 4.70
COEFFS = COEFFS_ALPINE_10Bars
class EdwardsFah2013Alpine20Bars(EdwardsFah2013Alpine10Bars):
"""
This class extends :class:`EdwardsFah2013Alpine10Bars`
and implements the 20Bars Model :class:`EdwardsFah2013Alpine20Bars`
"""
COEFFS = COEFFS_ALPINE_20Bars
class EdwardsFah2013Alpine30Bars(EdwardsFah2013Alpine10Bars):
"""
This class extends :class:`EdwardsFah2013Alpine10Bars`
and implements the 30Bars Model :class:`EdwardsFah2013Alpine30Bars`
"""
COEFFS = COEFFS_ALPINE_30Bars
class EdwardsFah2013Alpine50Bars(EdwardsFah2013Alpine10Bars):
"""
This class extends :class:`EdwardsFah2013Alpine10Bars`
and implements the 50Bars Model :class:`EdwardsFah2013Alpine50Bars`
"""
COEFFS = COEFFS_ALPINE_50Bars
class EdwardsFah2013Alpine60Bars(EdwardsFah2013Alpine10Bars):
"""
This class extends :class:`EdwardsFah2013Alpine10Bars`
and implements the 60Bars Model :class:`EdwardsFah2013Alpine60Bars`
"""
COEFFS = COEFFS_ALPINE_60Bars
class EdwardsFah2013Alpine75Bars(EdwardsFah2013Alpine10Bars):
"""
This class extends :class:`EdwardsFah2013Alpine10Bars`
and implements the 75Bars Model :class:`EdwardsFah2013Alpine75Bars`
"""
COEFFS = COEFFS_ALPINE_75Bars
class EdwardsFah2013Alpine90Bars(EdwardsFah2013Alpine10Bars):
"""
This class extends :class:`EdwardsFah2013Alpine10Bars`
and implements the 90Bars Model :class:`EdwardsFah2013Alpine90Bars`
"""
COEFFS = COEFFS_ALPINE_90Bars
class EdwardsFah2013Alpine120Bars(EdwardsFah2013Alpine10Bars):
"""
This class extends :class:`EdwardsFah2013Alpine10Bars`
and implements the 120Bars Model :class:`EdwardsFah2013Alpine120Bars`
"""
COEFFS = COEFFS_ALPINE_120Bars<|fim▁end|> | from openquake.hazardlib.gsim.edwards_fah_2013a_coeffs import (
COEFFS_ALPINE_60Bars,
COEFFS_ALPINE_10Bars, |
<|file_name|>q-longStack-after.js<|end_file_name|><|fim▁begin|>var Q = require("q")<|fim▁hole|>Q.longStackSupport = true
function a() { Q.delay(100).done(b) }
function b() { throw new Error("foo") }
a()
// Error: foo
// at b (/path/to/snippets/q-longStack-after.js:5:22)
// From previous event:
// at a (/path/to/snippets/q-longStack-after.js:4:29)
// at Object.<anonymous> (/path/to/snippets/q-longStack-after.js:7:1)<|fim▁end|> | |
<|file_name|>generic-unique.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Triple<T> { x: T, y: T, z: T }
<|fim▁hole|>fn box<T>(x: Triple<T>) -> ~Triple<T> { return ~x; }
pub fn main() {
let x: ~Triple<int> = box::<int>(Triple{x: 1, y: 2, z: 3});
assert_eq!(x.y, 2);
}<|fim▁end|> | |
<|file_name|>main.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import { Router, ActivatedRouteSnapshot, NavigationEnd, RoutesRecognized } from '@angular/router';
import { Title } from '@angular/platform-browser';
import { StateStorageService } from '../../shared';
@Component({
selector: 'jhi-main',
templateUrl: './main.component.html'
})
export class JhiMainComponent implements OnInit {
constructor(<|fim▁hole|> private titleService: Title,
private router: Router,
private $storageService: StateStorageService,
) {}
private getPageTitle(routeSnapshot: ActivatedRouteSnapshot) {
let title: string = (routeSnapshot.data && routeSnapshot.data['pageTitle']) ? routeSnapshot.data['pageTitle'] : 'tasksApp';
if (routeSnapshot.firstChild) {
title = this.getPageTitle(routeSnapshot.firstChild) || title;
}
return title;
}
ngOnInit() {
this.router.events.subscribe((event) => {
if (event instanceof NavigationEnd) {
this.titleService.setTitle(this.getPageTitle(this.router.routerState.snapshot.root));
}
});
}
}<|fim▁end|> | |
<|file_name|>query-milestones.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
import argparse as ap
import shared
ACTIONS = dict()
def action(key):
def wrapper(function):
ACTIONS[key] = function
return function
return wrapper
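# `action` is a registering decorator: each decorated printer is stored in
# ACTIONS under its key, so the entry point below can dispatch on the
# positional "what" argument via ACTIONS[args.what](repo, milestone).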
def get_closed_issues(repo, milestone):
issues_and_prs = repo.get_issues(milestone=milestone, state="closed")
issues_only = [i for i in issues_and_prs if i.pull_request is None]
return issues_only
def get_closed_prs(repo, milestone):
issues_and_prs = repo.get_issues(milestone=milestone, state="closed")<|fim▁hole|>@action("issues-closed")
def print_closed_issues(repo, milestone):
for issue in get_closed_issues(repo, milestone):
print(issue.title)
@action("prs-merged")
def print_closed_prs(repo, milestone):
for pr in get_closed_prs(repo, milestone):
print(pr.title)
def create_parser():
parser = ap.ArgumentParser()
parser.add_argument("version", type=shared.version_type)
parser.add_argument("what", choices=(ACTIONS.keys()))
shared.update_parser_with_common_stuff(parser)
return parser
if __name__ == "__main__":
parser = create_parser()
args = parser.parse_args()
gh = shared.get_github(args)
repo = shared.get_repo(gh, "OpenSCAP")
milestone = shared.get_milestone(repo, args.version)
ACTIONS[args.what](repo, milestone)<|fim▁end|> | prs_only = [i for i in issues_and_prs if i.pull_request is not None]
return prs_only
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Recap: The Rules of References
// 1. At any time, you can have *either* but not both of:
// a. One mutable reference
// b. Any number of immutable references
// 2. References must always be valid.
fn main() {
references();
mutable_references();
mutable_reference_scope_invalid();
mutable_reference_scope_valid();
immutable_references();
dangling_references();
}
fn references() {
let s = String::from("Hello");
let l = calculate_length(&s);
println!("The length of {} is {}", s, l);
}
fn mutable_references() {
let s = String::from("Hello");
// change(&mut s); // Err: cannot borrow immutable as mutable
let mut s = String::from("Hello"); // mutable variable
change(&mut s); // OK!
println!("The new string is {}", s);
}
fn mutable_reference_scope_invalid() {
let mut s = String::from("Hello");
// There can only be one mutable reference to a particular data
// in a particular scope:
let r = &mut s;
// let r2 = &mut s; // cannot borrow `s` as mutable more than once at a time
{
// let r2 = &mut s; // Same as above
}
}
fn mutable_reference_scope_valid() {
let mut s = String::from("Hello");
{
let r = &mut s;
r.push_str("GG");
} // r goes out of scope
let r2 = &mut s; // And now it's okay to create a new mutable reference
}
fn immutable_references() {
// It's okay to have multiple immutable references in the same scope
let s = String::from("Hello");
let r = &s; // OK!
let r2 = &s; // OK!
}
fn dangling_references() {
// let reference = dangle();
//
// fn dangle() -> &String {
// let s = String::from("Hello");
// &s // s is deallocated while the reference is returned
// }
}
fn calculate_length(s: &String) -> usize {
s.len()
}
// The following function will cause compile error
// fn change(s: &String) {
// s.push_str("GG"); // cannot borrow immutable borrowed content `*s` as mutable
// }
<|fim▁hole|>fn change(s: &mut String) {
s.push_str(", world");
}<|fim▁end|> | // The following function compiles fine |
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>module.exports = function (grunt) {
"use strict";
grunt.initConfig({
dirs: {
css: 'app/css',
js: 'app/js',
sass: 'app/sass',
},
compass: {
dist: {
options: {
config: 'config.rb',
}
}
},
concat: {
options: {
                separator: ';',
},
dist: {
src: ['<%= dirs.js %>/modules/*.js', '<%= dirs.js %>/src/*.js'],
dest: '<%= dirs.js %>/app.js'
}
},
uglify: {
options: {
// mangle: false,
debug: true,
},
target: {
files: {
'<%= dirs.js %>/app.min.js': ['<%= dirs.js %>/app.js']
}
}
},
watch: {<|fim▁hole|> css: {
files: [
'<%= dirs.sass %>/*.scss',
'<%= dirs.sass %>/modules/*.scss',
'<%= dirs.sass %>/partials/*.scss'
],
tasks: ['css'],
options: {
spawn: false,
}
},
js: {
files: [
'<%= dirs.js %>/modules/*.js',
'<%= dirs.js %>/src/*.js'
],
tasks: ['js'],
options: {
spawn: false,
}
}
}
});
grunt.loadNpmTasks('grunt-contrib-compass');
grunt.loadNpmTasks('grunt-contrib-concat');
grunt.loadNpmTasks('grunt-contrib-uglify');
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.registerTask('default', ['css', 'js']);
grunt.registerTask('css', ['compass']);
grunt.registerTask('js', ['concat', 'uglify']);
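    // Run from a shell: `grunt` (default: css + js), `grunt css`, `grunt js`, or `grunt watch`.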
};<|fim▁end|> | |
<|file_name|>server.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
#![feature(path_ext)]
extern crate eve;
extern crate getopts;
extern crate url;
extern crate core;
use std::thread;
use std::env;
use std::fs::PathExt;
use getopts::Options;
use std::net::SocketAddr;
use core::str::FromStr;
use eve::server;
use eve::static_server;<|fim▁hole|>
// handle command line arguments
let args: Vec<String> = env::args().collect();
// define the command line arguments
let mut opts = Options::new();
opts.optopt("f", "file-server-address", "specify a socket address for the static file server. Defaults to 0.0.0.0:8080","SOCKET ADDRESS");
opts.optopt("s", "saves", "specify the location of the saves directory","PATH");
opts.optflag("h", "help", "prints all options and usage");
// parse raw input arguments into options
let matches = match opts.parse(&args[1..]) {
Ok(m) => { m }
Err(f) => { panic!(f.to_string()) }
};
// print the help menu
if matches.opt_present("h") {
print!("{}", opts.usage(""));
return;
}
// parse static file server address
let default_addr = SocketAddr::from_str("0.0.0.0:8080").unwrap();
let addr = match matches.opt_str("f") {
Some(ip) => {
match SocketAddr::from_str(&*ip) {
Ok(addr) => addr,
Err(_) => {
println!("WARNING: Could not parse static file server address.\nDefaulting to {:?}",default_addr);
default_addr
}
}
},
None => default_addr,
};
// parse the saves directory
let default_saves_dir = "../saves/".to_owned();
let saves_dir = match matches.opt_str("s") {
Some(saves_dir) => saves_dir,
None => default_saves_dir,
};
let absolute_saves_dir = env::current_dir().unwrap().join(saves_dir).canonicalize().unwrap();
thread::spawn(move || static_server::run(addr.clone()));
server::run(absolute_saves_dir.as_path());
}<|fim▁end|> |
#[allow(dead_code)]
fn main() { |
<|file_name|>patch-buildtools_wafsamba_samba__conftests.py<|end_file_name|><|fim▁begin|>$NetBSD: patch-buildtools_wafsamba_samba__conftests.py,v 1.2 2019/11/10 17:01:58 adam Exp $
Ensure defines are strings to avoid assertion failure, some
returned values are unicode.
--- buildtools/wafsamba/samba_conftests.py.orig 2019-07-09 10:08:41.000000000 +0000
+++ buildtools/wafsamba/samba_conftests.py
@@ -97,9 +97,9 @@ def CHECK_LARGEFILE(conf, define='HAVE_L
if flag[:2] == "-D":
flag_split = flag[2:].split('=')
if len(flag_split) == 1:
- conf.DEFINE(flag_split[0], '1')
+ conf.DEFINE(str(flag_split[0]), '1')
else:
- conf.DEFINE(flag_split[0], flag_split[1])
+ conf.DEFINE(str(flag_split[0]), str(flag_split[1]))<|fim▁hole|><|fim▁end|> |
if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1',
define, |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Generated by typings
// Source: https://raw.githubusercontent.com/typed-typings/npm-assertion-error/105841317bd2bdd5d110bfb763e49e482a77230d/main.d.ts
declare module '~chai~assertion-error' {
// Type definitions for assertion-error 1.0.0
// Project: https://github.com/chaijs/assertion-error
// Definitions by: Bart van der Schoor <https://github.com/Bartvds>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
export class AssertionError implements Error {
constructor(message: string, props?: any, ssf?: Function);
public name: string;
public message: string;
public showDiff: boolean;
public stack: string;
/**
* Allow errors to be converted to JSON for static transfer.
*
     * @param {Boolean} stack - whether to include the stack trace (default: `true`)
     * @return {Object} an object that can be passed to `JSON.stringify`
*/
public toJSON(stack: boolean): Object;
}
}
// Generated by typings
// Source: https://raw.githubusercontent.com/typed-typings/npm-chai/0b70226aa4ea9c3b37fe1c709db0423f11ed30d8/lib/Assert.d.ts
declare module '~chai/lib/Assert' {
export interface AssertStatic extends Assert {
}
export interface Assert {
/**
* @param expression Expression to test for truthiness.
* @param message Message to display on error.
*/
(expression: any, message?: string): void;
(expression: any, messageCallback: () => string): void;
fail(actual?: any, expected?: any, msg?: string, operator?: string): void;
ok(val: any, msg?: string): void;
isOk(val: any, msg?: string): void;
notOk(val: any, msg?: string): void;
isNotOk(val: any, msg?: string): void;
equal(act: any, exp: any, msg?: string): void;
notEqual(act: any, exp: any, msg?: string): void;
strictEqual(act: any, exp: any, msg?: string): void;
notStrictEqual(act: any, exp: any, msg?: string): void;
deepEqual(act: any, exp: any, msg?: string): void;
notDeepEqual(act: any, exp: any, msg?: string): void;
isTrue(val: any, msg?: string): void;
isFalse(val: any, msg?: string): void;
isNotTrue(val: any, msg?: string): void;
isNotFalse(val: any, msg?: string): void;
isNull(val: any, msg?: string): void;
isNotNull(val: any, msg?: string): void;
isUndefined(val: any, msg?: string): void;
isDefined(val: any, msg?: string): void;
isNaN(val: any, msg?: string): void;
isNotNaN(val: any, msg?: string): void;
isAbove(val: number, abv: number, msg?: string): void;
isBelow(val: number, blw: number, msg?: string): void;
isAtLeast(val: number, atlst: number, msg?: string): void;
isAtMost(val: number, atmst: number, msg?: string): void;
isFunction(val: any, msg?: string): void;
isNotFunction(val: any, msg?: string): void;
isObject(val: any, msg?: string): void;
isNotObject(val: any, msg?: string): void;
isArray(val: any, msg?: string): void;
isNotArray(val: any, msg?: string): void;
isString(val: any, msg?: string): void;
isNotString(val: any, msg?: string): void;
isNumber(val: any, msg?: string): void;
isNotNumber(val: any, msg?: string): void;
isBoolean(val: any, msg?: string): void;
isNotBoolean(val: any, msg?: string): void;
typeOf(val: any, type: string, msg?: string): void;
notTypeOf(val: any, type: string, msg?: string): void;
instanceOf(val: any, type: Function, msg?: string): void;
notInstanceOf(val: any, type: Function, msg?: string): void;
include(exp: string, inc: any, msg?: string): void;
include(exp: any[], inc: any, msg?: string): void;
include(exp: Object, inc: Object, msg?: string): void;
notInclude(exp: string, inc: any, msg?: string): void;
notInclude(exp: any[], inc: any, msg?: string): void;
match(exp: any, re: RegExp, msg?: string): void;
notMatch(exp: any, re: RegExp, msg?: string): void;
property(obj: Object, prop: string, msg?: string): void;
notProperty(obj: Object, prop: string, msg?: string): void;
deepProperty(obj: Object, prop: string, msg?: string): void;
notDeepProperty(obj: Object, prop: string, msg?: string): void;
propertyVal(obj: Object, prop: string, val: any, msg?: string): void;
propertyNotVal(obj: Object, prop: string, val: any, msg?: string): void;
deepPropertyVal(obj: Object, prop: string, val: any, msg?: string): void;
deepPropertyNotVal(obj: Object, prop: string, val: any, msg?: string): void;
lengthOf(exp: any, len: number, msg?: string): void;
//alias frenzy
throw(fn: Function, msg?: string): void;
throw(fn: Function, regExp: RegExp): void;
throw(fn: Function, errType: Function, msg?: string): void;
throw(fn: Function, errType: Function, regExp: RegExp): void;
throws(fn: Function, msg?: string): void;
throws(fn: Function, regExp: RegExp): void;
throws(fn: Function, errType: Function, msg?: string): void;
throws(fn: Function, errType: Function, regExp: RegExp): void;
Throw(fn: Function, msg?: string): void;
Throw(fn: Function, regExp: RegExp): void;
Throw(fn: Function, errType: Function, msg?: string): void;
Throw(fn: Function, errType: Function, regExp: RegExp): void;
doesNotThrow(fn: Function, msg?: string): void;
doesNotThrow(fn: Function, regExp: RegExp): void;
doesNotThrow(fn: Function, errType: Function, msg?: string): void;
doesNotThrow(fn: Function, errType: Function, regExp: RegExp): void;
operator(val: any, operator: string, val2: any, msg?: string): void;
closeTo(act: number, exp: number, delta: number, msg?: string): void;
approximately(act: number, exp: number, delta: number, msg?: string): void;
sameMembers(set1: any[], set2: any[], msg?: string): void;
sameDeepMembers(set1: any[], set2: any[], msg?: string): void;
includeMembers(superset: any[], subset: any[], msg?: string): void;
includeDeepMembers(superset: any[], subset: any[], msg?: string): void;
ifError(val: any, msg?: string): void;
isExtensible(obj: {}, msg?: string): void;
extensible(obj: {}, msg?: string): void;
isNotExtensible(obj: {}, msg?: string): void;
notExtensible(obj: {}, msg?: string): void;
isSealed(obj: {}, msg?: string): void;
sealed(obj: {}, msg?: string): void;
isNotSealed(obj: {}, msg?: string): void;
notSealed(obj: {}, msg?: string): void;
isFrozen(obj: Object, msg?: string): void;
frozen(obj: Object, msg?: string): void;
isNotFrozen(obj: Object, msg?: string): void;
notFrozen(obj: Object, msg?: string): void;
oneOf(inList: any, list: any[], msg?: string): void;
changes(fn: Function, obj: {}, property: string): void;
doesNotChange(fn: Function, obj: {}, property: string): void;
increases(fn: Function, obj: {}, property: string): void;
doesNotIncrease(fn: Function, obj: {}, property: string): void;
decreases(fn: Function, obj: {}, property: string): void;
doesNotDecrease(fn: Function, obj: {}, property: string): void;
}
}
declare module 'chai/lib/Assert' {
export * from '~chai/lib/Assert';
}
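// Usage sketch (illustrative, not part of the declarations): the Assert
// interface above backs chai's TDD-style API, e.g.
//   import { assert } from 'chai';
//   assert.strictEqual(1 + 1, 2);
//   assert.lengthOf([1, 2, 3], 3, 'array has three elements');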
// Generated by typings
// Source: https://raw.githubusercontent.com/typed-typings/npm-chai/0b70226aa4ea9c3b37fe1c709db0423f11ed30d8/lib/Assertion.d.ts
declare module '~chai/lib/Assertion' {
export interface AssertionStatic {
(target?: any, message?: string, stack?: Function): Assertion;
new (target?: any, message?: string, stack?: Function): Assertion;
}
export interface Assertion extends LanguageChains, NumericComparison, TypeComparison {
not: Assertion;
deep: Deep;
any: KeyFilter;
all: KeyFilter;
a: TypeComparison;
an: TypeComparison;
include: Include;
includes: Include;
contain: Include;
contains: Include;
ok: Assertion;
true: Assertion;
false: Assertion;
null: Assertion;
undefined: Assertion;
NaN: Assertion;
exist: Assertion;
empty: Assertion;
arguments: Assertion;
Arguments: Assertion;
equal: Equal;
equals: Equal;
eq: Equal;
eql: Equal;
eqls: Equal;
property: Property;
ownProperty: OwnProperty;
haveOwnProperty: OwnProperty;
ownPropertyDescriptor: OwnPropertyDescriptor;
haveOwnPropertyDescriptor: OwnPropertyDescriptor;
length: Length;
lengthOf: Length;
match: Match;
matches: Match;
string(str: string, message?: string): Assertion;
keys: Keys;
key(str: string): Assertion;
throw: Throw;
throws: Throw;
Throw: Throw;
respondTo: RespondTo;
respondsTo: RespondTo;
itself: Assertion;
satisfy: Satisfy;
satisfies: Satisfy;
closeTo: CloseTo;
approximately: CloseTo;
members: Members;
increase: PropertyChange;
increases: PropertyChange;
decrease: PropertyChange;
decreases: PropertyChange;
change: PropertyChange;
changes: PropertyChange;
extensible: Assertion;
sealed: Assertion;
frozen: Assertion;
oneOf(list: any[], message?: string): Assertion;
}
export interface LanguageChains {
to: Assertion;
be: Assertion;
been: Assertion;
is: Assertion;
that: Assertion;
which: Assertion;
and: Assertion;
has: Assertion;
have: Assertion;
with: Assertion;
at: Assertion;
of: Assertion;
same: Assertion;
}
export interface NumericComparison {
above: NumberComparer;
gt: NumberComparer;
greaterThan: NumberComparer;
least: NumberComparer;
gte: NumberComparer;
below: NumberComparer;
lt: NumberComparer;
lessThan: NumberComparer;
most: NumberComparer;
lte: NumberComparer;
within(start: number, finish: number, message?: string): Assertion;
}
export interface NumberComparer {
(value: number, message?: string): Assertion;
}
export interface TypeComparison {
(type: string, message?: string): Assertion;
instanceof: InstanceOf;
instanceOf: InstanceOf;
}
export interface InstanceOf {
(constructor: Object, message?: string): Assertion;
}
export interface CloseTo {
(expected: number, delta: number, message?: string): Assertion;
}
export interface Deep {
equal: Equal;
equals: Equal;
eq: Equal;
include: Include;
property: Property;
members: Members;
}
export interface KeyFilter {
keys: Keys;
}
export interface Equal {
(value: any, message?: string): Assertion;
}
export interface Property {
(name: string, value?: any, message?: string): Assertion;
}
export interface OwnProperty {
(name: string, message?: string): Assertion;
}
export interface OwnPropertyDescriptor {
(name: string, descriptor: PropertyDescriptor, message?: string): Assertion;
(name: string, message?: string): Assertion;
}
export interface Length extends LanguageChains, NumericComparison {
(length: number, message?: string): Assertion;
}
export interface Include {
(value: Object, message?: string): Assertion;
(value: string, message?: string): Assertion;
(value: number, message?: string): Assertion;
string(value: string, message?: string): Assertion;
keys: Keys;
members: Members;
any: KeyFilter;
all: KeyFilter;
}
export interface Match {
(regexp: RegExp | string, message?: string): Assertion;
}
export interface Keys {
(...keys: any[]): Assertion;
(keys: any[]): Assertion;
(keys: Object): Assertion;
}
export interface Throw {
(): Assertion;
(expected: string, message?: string): Assertion;
(expected: RegExp, message?: string): Assertion;
(constructor: Error, expected?: string, message?: string): Assertion;
(constructor: Error, expected?: RegExp, message?: string): Assertion;
(constructor: Function, expected?: string, message?: string): Assertion;
(constructor: Function, expected?: RegExp, message?: string): Assertion;
}
export interface RespondTo {
(method: string, message?: string): Assertion;
}
export interface Satisfy {
(matcher: Function, message?: string): Assertion;
}
export interface Members {
(set: any[], message?: string): Assertion;
}
export interface PropertyChange {
(object: Object, prop: string, msg?: string): Assertion;
}
}<|fim▁hole|>export * from '~chai/lib/Assertion';
}
// Generated by typings
// Source: https://raw.githubusercontent.com/typed-typings/npm-chai/0b70226aa4ea9c3b37fe1c709db0423f11ed30d8/lib/Expect.d.ts
declare module '~chai/lib/Expect' {
import {AssertionStatic} from '~chai/lib/Assertion';
export interface ExpectStatic extends AssertionStatic {
fail(actual?: any, expected?: any, message?: string, operator?: string): void;
}
}
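// Usage sketch (illustrative): ExpectStatic is callable and returns a chainable
// Assertion, e.g.
//   import { expect } from 'chai';
//   expect([1, 2, 3]).to.have.lengthOf(3);
//   expect(() => { throw new TypeError('boom'); }).to.throw(TypeError);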
declare module 'chai/lib/Expect' {
export * from '~chai/lib/Expect';
}
// Generated by typings
// Source: https://raw.githubusercontent.com/typed-typings/npm-chai/0b70226aa4ea9c3b37fe1c709db0423f11ed30d8/lib/Should.d.ts
declare module '~chai/lib/Should' {
export interface Should extends ShouldAssertion {
not: ShouldAssertion;
fail(actual: any, expected: any, message?: string, operator?: string): void;
}
export interface ShouldAssertion {
Throw: ShouldThrow;
throw: ShouldThrow;
equal(value1: any, value2: any, message?: string): void;
exist(value: any, message?: string): void;
}
export interface ShouldThrow {
(actual: Function): void;
(actual: Function, expected: string | RegExp, message?: string): void;
(actual: Function, constructor: Error | Function, expected?: string | RegExp, message?: string): void;
}
}
declare module 'chai/lib/Should' {
export * from '~chai/lib/Should';
}
// Generated by typings
// Source: https://raw.githubusercontent.com/typed-typings/npm-chai/0b70226aa4ea9c3b37fe1c709db0423f11ed30d8/lib/Config.d.ts
declare module '~chai/lib/Config' {
export interface Config {
includeStack: boolean;
truncateThreshold: number;
}
}
declare module 'chai/lib/Config' {
export * from '~chai/lib/Config';
}
// Generated by typings
// Source: https://raw.githubusercontent.com/typed-typings/npm-chai/0b70226aa4ea9c3b37fe1c709db0423f11ed30d8/lib/Utils.d.ts
declare module '~chai/lib/Utils' {
import {Assertion} from '~chai/lib/Assertion';
export interface Utils {
addChainableMethod(ctx: any, name: string, chainingBehavior: (value: any) => void);
addMethod(ctx: any, name: string, method: (value: any) => void);
addProperty(ctx: any, name: string, getter: () => void);
expectTypes(obj: Object, types: string[]);
flag(obj: Object, key: string, value?: any);
getActual(obj: Object, actual?: any);
getEnumerableProperties(obj: Object);
getMessage(obj: Object, params: any[]);
getMessage(obj: Object, message: string, negateMessage: string);
getName(func: Function);
getPathInfo(path: string, obj: Object);
getPathValue(path: string, obj: Object);
getProperties(obj: Object);
hasProperty(obj: Object, name: string);
transferFlags(assertion: Assertion | any, obj: Object, includeAll?: boolean);
inspect(obj: any);
}
}
declare module 'chai/lib/Utils' {
export * from '~chai/lib/Utils';
}
// Generated by typings
// Source: https://raw.githubusercontent.com/typed-typings/npm-chai/0b70226aa4ea9c3b37fe1c709db0423f11ed30d8/lib/Chai.d.ts
declare module '~chai/lib/Chai' {
import * as AE from '~chai~assertion-error';
import * as Assert from '~chai/lib/Assert';
import * as A from '~chai/lib/Assertion';
import * as Expect from '~chai/lib/Expect';
import * as Should from '~chai/lib/Should';
import * as Config from '~chai/lib/Config';
import * as Utils from '~chai/lib/Utils';
namespace chai {
export interface AssertionStatic extends A.AssertionStatic {}
export class AssertionError extends AE.AssertionError {}
export var Assertion: A.AssertionStatic;
export var expect: Expect.ExpectStatic;
export var assert: Assert.AssertStatic;
export var config: Config.Config;
export var util: Utils.Utils;
export function should(): Should.Should;
export function Should(): Should.Should;
/**
* Provides a way to extend the internals of Chai
*/
export function use(fn: (chai: any, utils: Utils.Utils) => void): typeof chai;
}
export = chai;
global {
interface Object {
should: A.Assertion;
}
}
}
declare module 'chai/lib/Chai' {
import main = require('~chai/lib/Chai');
export = main;
}
// Generated by typings
// Source: https://raw.githubusercontent.com/typed-typings/npm-chai/0b70226aa4ea9c3b37fe1c709db0423f11ed30d8/index.d.ts
declare module 'chai' {
// Type definitions for chai 3.4.0
// Project: http://chaijs.com/
// Original Definitions by: Jed Mao <https://github.com/jedmao/>,
// Bart van der Schoor <https://github.com/Bartvds>,
// Andrew Brown <https://github.com/AGBrown>,
// Olivier Chevet <https://github.com/olivr70>,
// Matt Wistrand <https://github.com/mwistrand>
import chai = require('~chai/lib/Chai');
export = chai;
}<|fim▁end|> | declare module 'chai/lib/Assertion' { |
<|file_name|>custom-adapters.component.spec.ts<|end_file_name|><|fim▁begin|>import { CommonModule } from '@angular/common';<|fim▁hole|>import { MatMenuModule } from '@angular/material/menu';
import { MatTableModule } from '@angular/material/table';
import { MatToolbarModule } from '@angular/material/toolbar';
import { NoopAnimationsModule } from '@angular/platform-browser/animations';
import { CustomAdaptersTableComponent } from '../custom-adapters-table/custom-adapters-table.component';
import { CustomAdaptersComponent } from './custom-adapters.component';
describe('Custom adapters component', () => {
let component: CustomAdaptersComponent;
let fixture: ComponentFixture<CustomAdaptersComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
imports: [
MatTableModule,
MatIconModule,
MatToolbarModule,
MatMenuModule,
MatCheckboxModule,
CommonModule,
NoopAnimationsModule,
],
declarations: [CustomAdaptersComponent],
schemas: [NO_ERRORS_SCHEMA],
}).compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(CustomAdaptersComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
it('should emit event when apply clicked', () => {
spyOn(component.saveConfigurationEmitter, 'emit');
component.customAdapters = { adapters: [] } as CustomAdaptersTableComponent;
component.configureCustomAdapters();
expect(component.saveConfigurationEmitter.emit).toHaveBeenCalled();
});
it('should emit event when cancel clicked', () => {
spyOn(component.closeConfiguratorEmitter, 'emit');
component.cancelConfigureCustomAdapters();
expect(component.closeConfiguratorEmitter.emit).toHaveBeenCalled();
});
});<|fim▁end|> | import { NO_ERRORS_SCHEMA } from '@angular/core';
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { MatCheckboxModule } from '@angular/material/checkbox';
import { MatIconModule } from '@angular/material/icon'; |
<|file_name|>celeba_formatting.py<|end_file_name|><|fim▁begin|># Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""CelebA dataset formating.
Download img_align_celeba.zip from
http://mmlab.ie.cuhk.edu.hk/projects/CelebA.html under the
link "Align&Cropped Images" in the "Img" directory and list_eval_partition.txt
under the link "Train/Val/Test Partitions" in the "Eval" directory. Then do:
unzip img_align_celeba.zip
Use the script as follows:
python celeba_formatting.py \
--partition_fn [PARTITION_FILE_PATH] \
--file_out [OUTPUT_FILE_PATH_PREFIX] \
--fn_root [CELEBA_FOLDER] \
--set [SUBSET_INDEX]
"""
from __future__ import print_function

import os
import os.path
import scipy.ndimage<|fim▁hole|>import tensorflow as tf
tf.flags.DEFINE_string("file_out", "",
"Filename of the output .tfrecords file.")
tf.flags.DEFINE_string("fn_root", "", "Name of root file path.")
tf.flags.DEFINE_string("partition_fn", "", "Partition file path.")
tf.flags.DEFINE_string("set", "", "Name of subset.")
FLAGS = tf.flags.FLAGS
def _int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def _bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
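# Illustrative use of the helpers above (hypothetical values):
#   _int64_feature(218)     -> Feature(int64_list=Int64List(value=[218]))
#   _bytes_feature(b"\x00") -> Feature(bytes_list=BytesList(value=[b"\x00"]))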
def main():
"""Main converter function."""
# Celeb A
with open(FLAGS.partition_fn, "r") as infile:
img_fn_list = infile.readlines()
img_fn_list = [elem.strip().split() for elem in img_fn_list]
img_fn_list = [elem[0] for elem in img_fn_list if elem[1] == FLAGS.set]
fn_root = FLAGS.fn_root
num_examples = len(img_fn_list)
file_out = "%s.tfrecords" % FLAGS.file_out
writer = tf.python_io.TFRecordWriter(file_out)
for example_idx, img_fn in enumerate(img_fn_list):
if example_idx % 1000 == 0:
            print(example_idx, "/", num_examples)
image_raw = scipy.ndimage.imread(os.path.join(fn_root, img_fn))
rows = image_raw.shape[0]
cols = image_raw.shape[1]
depth = image_raw.shape[2]
image_raw = image_raw.tostring()
example = tf.train.Example(
features=tf.train.Features(
feature={
"height": _int64_feature(rows),
"width": _int64_feature(cols),
"depth": _int64_feature(depth),
"image_raw": _bytes_feature(image_raw)
}
)
)
writer.write(example.SerializeToString())
writer.close()
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>torrent.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2008-2013 Erik Svensson <[email protected]>
# Licensed under the MIT license.
import sys
import datetime
from core.transmissionrpc.constants import PRIORITY, RATIO_LIMIT, IDLE_LIMIT
from core.transmissionrpc.utils import Field, format_timedelta
from six import integer_types, string_types, text_type, iteritems
def get_status_old(code):
"""Get the torrent status using old status codes"""
mapping = {
(1 << 0): 'check pending',
(1 << 1): 'checking',
(1 << 2): 'downloading',
(1 << 3): 'seeding',
(1 << 4): 'stopped',
}
return mapping[code]
def get_status_new(code):
"""Get the torrent status using new status codes"""
mapping = {
0: 'stopped',
1: 'check pending',
2: 'checking',
3: 'download pending',
4: 'downloading',
5: 'seed pending',
6: 'seeding',
}
return mapping[code]
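# For example (illustrative): get_status_new(4) == 'downloading', while daemons
# with RPC version < 14 report the bit-flag codes handled by get_status_old().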
class Torrent(object):
"""
Torrent is a class holding the data received from Transmission regarding a bittorrent transfer.
All fetched torrent fields are accessible through this class using attributes.
This class has a few convenience properties using the torrent data.
"""
def __init__(self, client, fields):
if 'id' not in fields:
raise ValueError('Torrent requires an id')
self._fields = {}
self._update_fields(fields)
self._incoming_pending = False
self._outgoing_pending = False
self._client = client
def _get_name_string(self, codec=None):
"""Get the name"""
if codec is None:
codec = sys.getdefaultencoding()
name = None
# try to find name
if 'name' in self._fields:
name = self._fields['name'].value
# if name is unicode, try to decode
if isinstance(name, text_type):
try:
name = name.encode(codec)
except UnicodeError:
name = None
return name
def __repr__(self):
tid = self._fields['id'].value
name = self._get_name_string()
if isinstance(name, str):
return '<Torrent {0:d} \"{1}\">'.format(tid, name)
else:
return '<Torrent {0:d}>'.format(tid)
def __str__(self):
name = self._get_name_string()
if isinstance(name, str):
return 'Torrent \"{0}\"'.format(name)
else:
return 'Torrent'
def __copy__(self):
return Torrent(self._client, self._fields)
def __getattr__(self, name):
try:
return self._fields[name].value
except KeyError:
raise AttributeError('No attribute {0}'.format(name))
def _rpc_version(self):
"""Get the Transmission RPC API version."""
if self._client:
return self._client.rpc_version
return 2
def _dirty_fields(self):
"""Enumerate changed fields"""
outgoing_keys = ['bandwidthPriority', 'downloadLimit', 'downloadLimited', 'peer_limit', 'queuePosition',
'seedIdleLimit', 'seedIdleMode', 'seedRatioLimit', 'seedRatioMode', 'uploadLimit',
'uploadLimited']
fields = []
for key in outgoing_keys:
if key in self._fields and self._fields[key].dirty:
fields.append(key)
return fields
def _push(self):
"""Push changed fields to the server"""
dirty = self._dirty_fields()
args = {}
for key in dirty:
args[key] = self._fields[key].value
self._fields[key] = self._fields[key]._replace(dirty=False)
if len(args) > 0:
self._client.change_torrent(self.id, **args)
def _update_fields(self, other):
"""
Update the torrent data from a Transmission JSON-RPC arguments dictionary
"""
if isinstance(other, dict):
for key, value in iteritems(other):
self._fields[key.replace('-', '_')] = Field(value, False)
elif isinstance(other, Torrent):
for key in list(other._fields.keys()):
self._fields[key] = Field(other._fields[key].value, False)
else:
raise ValueError('Cannot update with supplied data')
self._incoming_pending = False
def _status(self):
"""Get the torrent status"""
code = self._fields['status'].value
if self._rpc_version() >= 14:
return get_status_new(code)
else:
return get_status_old(code)
def files(self):
"""
Get list of files for this torrent.
This function returns a dictionary with file information for each file.
        The file information has the following fields:
::
{
<file id>: {
'name': <file name>,
'size': <file size in bytes>,
'completed': <bytes completed>,
'priority': <priority ('high'|'normal'|'low')>,
'selected': <selected for download>
}
...
}
"""
result = {}
if 'files' in self._fields:
files = self._fields['files'].value
indices = range(len(files))
priorities = self._fields['priorities'].value
wanted = self._fields['wanted'].value
for item in zip(indices, files, priorities, wanted):
selected = True if item[3] else False
priority = PRIORITY[item[2]]
result[item[0]] = {
'selected': selected,
'priority': priority,
'size': item[1]['length'],
'name': item[1]['name'],
'completed': item[1]['bytesCompleted']}
return result
@property
def status(self):
"""
        Returns the torrent status. Is one of 'check pending', 'checking',
        'downloading', 'seeding' or 'stopped' (newer RPC versions also report
        'download pending' and 'seed pending'). The first two relate to
        verification.
"""
return self._status()
@property
def progress(self):
"""Get the download progress in percent."""
try:
size = self._fields['sizeWhenDone'].value
left = self._fields['leftUntilDone'].value
return 100.0 * (size - left) / float(size)
except ZeroDivisionError:
return 0.0
@property
def ratio(self):
"""Get the upload/download ratio."""
return float(self._fields['uploadRatio'].value)
@property
def eta(self):
"""Get the "eta" as datetime.timedelta."""<|fim▁hole|> if eta >= 0:
return datetime.timedelta(seconds=eta)
else:
raise ValueError('eta not valid')
@property
def date_active(self):
"""Get the attribute "activityDate" as datetime.datetime."""
return datetime.datetime.fromtimestamp(self._fields['activityDate'].value)
@property
def date_added(self):
"""Get the attribute "addedDate" as datetime.datetime."""
return datetime.datetime.fromtimestamp(self._fields['addedDate'].value)
@property
def date_started(self):
"""Get the attribute "startDate" as datetime.datetime."""
return datetime.datetime.fromtimestamp(self._fields['startDate'].value)
@property
def date_done(self):
"""Get the attribute "doneDate" as datetime.datetime."""
return datetime.datetime.fromtimestamp(self._fields['doneDate'].value)
def format_eta(self):
"""
Returns the attribute *eta* formatted as a string.
* If eta is -1 the result is 'not available'
* If eta is -2 the result is 'unknown'
* Otherwise eta is formatted as <days> <hours>:<minutes>:<seconds>.
"""
eta = self._fields['eta'].value
if eta == -1:
return 'not available'
elif eta == -2:
return 'unknown'
else:
return format_timedelta(self.eta)
def _get_download_limit(self):
"""
Get the download limit.
Can be a number or None.
"""
if self._fields['downloadLimited'].value:
return self._fields['downloadLimit'].value
else:
return None
def _set_download_limit(self, limit):
"""
        Set the download limit.
        Can be a number or None.
"""
if isinstance(limit, integer_types):
self._fields['downloadLimited'] = Field(True, True)
self._fields['downloadLimit'] = Field(limit, True)
self._push()
elif limit is None:
self._fields['downloadLimited'] = Field(False, True)
self._push()
else:
raise ValueError("Not a valid limit")
download_limit = property(_get_download_limit, _set_download_limit, None,
"Download limit in Kbps or None. This is a mutator.")
def _get_peer_limit(self):
"""
Get the peer limit.
"""
return self._fields['peer_limit'].value
def _set_peer_limit(self, limit):
"""
Set the peer limit.
"""
if isinstance(limit, integer_types):
self._fields['peer_limit'] = Field(limit, True)
self._push()
else:
raise ValueError("Not a valid limit")
peer_limit = property(_get_peer_limit, _set_peer_limit, None, "Peer limit. This is a mutator.")
def _get_priority(self):
"""
Get the priority as string.
Can be one of 'low', 'normal', 'high'.
"""
return PRIORITY[self._fields['bandwidthPriority'].value]
def _set_priority(self, priority):
"""
Set the priority as string.
Can be one of 'low', 'normal', 'high'.
"""
if isinstance(priority, string_types):
self._fields['bandwidthPriority'] = Field(PRIORITY[priority], True)
self._push()
priority = property(_get_priority, _set_priority, None
, "Bandwidth priority as string. Can be one of 'low', 'normal', 'high'. This is a mutator.")
def _get_seed_idle_limit(self):
"""
Get the seed idle limit in minutes.
"""
return self._fields['seedIdleLimit'].value
def _set_seed_idle_limit(self, limit):
"""
Set the seed idle limit in minutes.
"""
if isinstance(limit, integer_types):
self._fields['seedIdleLimit'] = Field(limit, True)
self._push()
else:
raise ValueError("Not a valid limit")
seed_idle_limit = property(_get_seed_idle_limit, _set_seed_idle_limit, None
, "Torrent seed idle limit in minutes. Also see seed_idle_mode. This is a mutator.")
def _get_seed_idle_mode(self):
"""
        Get the seed idle mode as string. Can be one of 'global', 'single' or 'unlimited'.
"""
return IDLE_LIMIT[self._fields['seedIdleMode'].value]
def _set_seed_idle_mode(self, mode):
"""
        Set the seed idle mode as string. Can be one of 'global', 'single' or 'unlimited'.
"""
        if isinstance(mode, string_types):
            self._fields['seedIdleMode'] = Field(IDLE_LIMIT[mode], True)
            self._push()
        else:
            raise ValueError("Not a valid mode")
seed_idle_mode = property(_get_seed_idle_mode, _set_seed_idle_mode, None,
"""
Seed idle mode as string. Can be one of 'global', 'single' or 'unlimited'.
* global, use session seed idle limit.
* single, use torrent seed idle limit. See seed_idle_limit.
* unlimited, no seed idle limit.
This is a mutator.
"""
)
def _get_seed_ratio_limit(self):
"""
Get the seed ratio limit as float.
"""
return float(self._fields['seedRatioLimit'].value)
def _set_seed_ratio_limit(self, limit):
"""
Set the seed ratio limit as float.
"""
if isinstance(limit, (integer_types, float)) and limit >= 0.0:
self._fields['seedRatioLimit'] = Field(float(limit), True)
self._push()
else:
raise ValueError("Not a valid limit")
seed_ratio_limit = property(_get_seed_ratio_limit, _set_seed_ratio_limit, None
, "Torrent seed ratio limit as float. Also see seed_ratio_mode. This is a mutator.")
def _get_seed_ratio_mode(self):
"""
Get the seed ratio mode as string. Can be one of 'global', 'single' or 'unlimited'.
"""
return RATIO_LIMIT[self._fields['seedRatioMode'].value]
def _set_seed_ratio_mode(self, mode):
"""
Set the seed ratio mode as string. Can be one of 'global', 'single' or 'unlimited'.
"""
        if isinstance(mode, string_types):
            self._fields['seedRatioMode'] = Field(RATIO_LIMIT[mode], True)
            self._push()
        else:
            raise ValueError("Not a valid mode")
seed_ratio_mode = property(_get_seed_ratio_mode, _set_seed_ratio_mode, None,
"""
Seed ratio mode as string. Can be one of 'global', 'single' or 'unlimited'.
* global, use session seed ratio limit.
* single, use torrent seed ratio limit. See seed_ratio_limit.
* unlimited, no seed ratio limit.
This is a mutator.
"""
)
def _get_upload_limit(self):
"""
Get the upload limit.
Can be a number or None.
"""
if self._fields['uploadLimited'].value:
return self._fields['uploadLimit'].value
else:
return None
def _set_upload_limit(self, limit):
"""
Set the upload limit.
        Can be a number or None.
"""
if isinstance(limit, integer_types):
self._fields['uploadLimited'] = Field(True, True)
self._fields['uploadLimit'] = Field(limit, True)
self._push()
elif limit is None:
self._fields['uploadLimited'] = Field(False, True)
self._push()
else:
raise ValueError("Not a valid limit")
upload_limit = property(_get_upload_limit, _set_upload_limit, None,
"Upload limit in Kbps or None. This is a mutator.")
def _get_queue_position(self):
"""Get the queue position for this torrent."""
if self._rpc_version() >= 14:
return self._fields['queuePosition'].value
else:
return 0
def _set_queue_position(self, position):
"""Set the queue position for this torrent."""
if self._rpc_version() >= 14:
if isinstance(position, integer_types):
self._fields['queuePosition'] = Field(position, True)
self._push()
else:
raise ValueError("Not a valid position")
else:
pass
queue_position = property(_get_queue_position, _set_queue_position, None, "Queue position")
def update(self, timeout=None):
"""Update the torrent information."""
self._push()
torrent = self._client.get_torrent(self.id, timeout=timeout)
self._update_fields(torrent)
def start(self, bypass_queue=False, timeout=None):
"""
Start the torrent.
"""
self._incoming_pending = True
self._client.start_torrent(self.id, bypass_queue=bypass_queue, timeout=timeout)
def stop(self, timeout=None):
"""Stop the torrent."""
self._incoming_pending = True
self._client.stop_torrent(self.id, timeout=timeout)
def move_data(self, location, timeout=None):
"""Move torrent data to location."""
self._incoming_pending = True
self._client.move_torrent_data(self.id, location, timeout=timeout)
def locate_data(self, location, timeout=None):
"""Locate torrent data at location."""
self._incoming_pending = True
self._client.locate_torrent_data(self.id, location, timeout=timeout)<|fim▁end|> | eta = self._fields['eta'].value |
<|file_name|>GLPlatformContext.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2012 Intel Corporation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "GLPlatformContext.h"
#if USE(ACCELERATED_COMPOSITING)
#if USE(GLX)
#include "GLXContext.h"
#elif USE(EGL)
#include "EGLContext.h"
#endif
#include "NotImplemented.h"
namespace WebCore {
#if USE(OPENGL_ES_2)
static PFNGLGETGRAPHICSRESETSTATUSEXTPROC glGetGraphicsResetStatus = 0;
#else
static PFNGLGETGRAPHICSRESETSTATUSARBPROC glGetGraphicsResetStatus = 0;
#endif
static GLPlatformContext* m_currentContext = 0;
class GLCurrentContextWrapper : public GLPlatformContext {
public:
GLCurrentContextWrapper()
: GLPlatformContext()
{
#if USE(GLX)
m_contextHandle = glXGetCurrentContext();
#elif USE(EGL)
m_contextHandle = eglGetCurrentContext();
#endif
if (m_contextHandle)
m_currentContext = this;
}
virtual ~GLCurrentContextWrapper() { }
};
static PassOwnPtr<GLPlatformContext> createOffScreenContext()
{
#if USE(GLX)
return adoptPtr(new GLXOffScreenContext());
#elif USE(EGL)
return adoptPtr(new EGLOffScreenContext());
#else
return nullptr;
#endif
}
static HashSet<String> parseExtensions(const String& extensionsString)
{
Vector<String> extNames;
extensionsString.split(" ", extNames);
HashSet<String> splitExtNames;
unsigned size = extNames.size();
for (unsigned i = 0; i < size; ++i)
splitExtNames.add(extNames[i]);
extNames.clear();
return splitExtNames;
}
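// For example (illustrative): parseExtensions("GL_OES_rgb8_rgba8 GL_OES_depth24")
// yields a set containing both names, so the supports*Extension() helpers below
// perform one hashed lookup instead of rescanning the raw extension string.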
static void resolveResetStatusExtension()
{
static bool resolvedRobustnessExtension = false;
if (!resolvedRobustnessExtension) {
resolvedRobustnessExtension = true;
#if USE(OPENGL_ES_2)
glGetGraphicsResetStatus = reinterpret_cast<PFNGLGETGRAPHICSRESETSTATUSEXTPROC>(eglGetProcAddress("glGetGraphicsResetStatusEXT"));
#elif USE(EGL)
glGetGraphicsResetStatus = reinterpret_cast<PFNGLGETGRAPHICSRESETSTATUSARBPROC>(eglGetProcAddress("glGetGraphicsResetStatusARB"));
#elif USE(GLX)
glGetGraphicsResetStatus = reinterpret_cast<PFNGLGETGRAPHICSRESETSTATUSARBPROC>(glXGetProcAddressARB(reinterpret_cast<const GLubyte*>("glGetGraphicsResetStatusARB")));
#endif
}
}
PassOwnPtr<GLPlatformContext> GLPlatformContext::createContext(GraphicsContext3D::RenderStyle renderStyle)
{
#if !USE(OPENGL_ES_2)
if (!initializeOpenGLShims())
return nullptr;
#endif
switch (renderStyle) {
case GraphicsContext3D::RenderOffscreen:
if (OwnPtr<GLPlatformContext> context = createOffScreenContext())
return context.release();
break;
case GraphicsContext3D::RenderToCurrentGLContext:
if (OwnPtr<GLPlatformContext> context = adoptPtr(new GLCurrentContextWrapper()))
return context.release();
break;
case GraphicsContext3D::RenderDirectlyToHostWindow:
ASSERT_NOT_REACHED();
break;
}
return nullptr;
}
bool GLPlatformContext::supportsGLExtension(const String& name)
{
static HashSet<String> supportedExtensions;
if (!supportedExtensions.size()) {
String rawExtensions = reinterpret_cast<const char*>(::glGetString(GL_EXTENSIONS));
supportedExtensions = parseExtensions(rawExtensions);
}
if (supportedExtensions.contains(name))
return true;
return false;
}
#if USE(EGL)
bool GLPlatformContext::supportsEGLExtension(EGLDisplay display, const String& name)
{
static HashSet<String> supportedExtensions;
if (!supportedExtensions.size()) {
if (display == EGL_NO_DISPLAY)
return false;
String rawExtensions = reinterpret_cast<const char*>(eglQueryString(display, EGL_EXTENSIONS));
supportedExtensions = parseExtensions(rawExtensions);
}
if (supportedExtensions.contains(name))
return true;
return false;
}
#endif
#if USE(GLX)
bool GLPlatformContext::supportsGLXExtension(Display* display, const String& name)
{
static HashSet<String> supportedExtensions;
if (!supportedExtensions.size()) {
if (!display)
return false;
String rawExtensions = glXQueryExtensionsString(display, DefaultScreen(display));
supportedExtensions = parseExtensions(rawExtensions);
}
if (supportedExtensions.contains(name))
return true;
return false;
}
#endif
GLPlatformContext::GLPlatformContext()
: m_contextHandle(0)
, m_resetLostContext(false)
{
}
GLPlatformContext::~GLPlatformContext()
{
if (this == m_currentContext)
m_currentContext = 0;
}
bool GLPlatformContext::makeCurrent(GLPlatformSurface* surface)
{
m_contextLost = false;
if (m_currentContext == this && (!surface || surface->isCurrentDrawable()))
return true;
m_currentContext = 0;
    if (!surface || !surface->drawable())
platformReleaseCurrent();
else if (platformMakeCurrent(surface)) {
m_currentContext = this;
surface->onMakeCurrent();
}
if (m_resetLostContext) {
resolveResetStatusExtension();
if (glGetGraphicsResetStatus) {
GLenum status = glGetGraphicsResetStatus();
switch (status) {
case PLATFORMCONTEXT_NO_ERROR:
break;
case PLATFORMCONTEXT_GUILTY_CONTEXT_RESET:
m_contextLost = true;
break;
case PLATFORMCONTEXT_INNOCENT_CONTEXT_RESET:
break;
case PLATFORMCONTEXT_UNKNOWN_CONTEXT_RESET:
m_contextLost = true;
break;
default:
break;
}
}
}
return m_currentContext;
}
bool GLPlatformContext::isValid() const
{
return !m_contextLost;
}
void GLPlatformContext::releaseCurrent()
{
if (this == m_currentContext) {
m_currentContext = 0;
platformReleaseCurrent();
}
}
PlatformContext GLPlatformContext::handle() const
{
return m_contextHandle;
}
bool GLPlatformContext::isCurrentContext() const<|fim▁hole|>
bool GLPlatformContext::initialize(GLPlatformSurface*, PlatformContext)
{
return true;
}
GLPlatformContext* GLPlatformContext::getCurrent()
{
return m_currentContext;
}
bool GLPlatformContext::platformMakeCurrent(GLPlatformSurface*)
{
return true;
}
void GLPlatformContext::platformReleaseCurrent()
{
notImplemented();
}
void GLPlatformContext::destroy()
{
m_contextHandle = 0;
m_resetLostContext = false;
}
} // namespace WebCore
#endif<|fim▁end|> | {
return true;
} |
<|file_name|>reportview.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""build query for doclistview and return results"""
import frappe, json
import frappe.permissions
from frappe.model.db_query import DatabaseQuery
from frappe import _
@frappe.whitelist()
def get():
args = get_form_params()
data = compress(execute(**args), args = args)
return data
def execute(doctype, *args, **kwargs):
return DatabaseQuery(doctype).execute(*args, **kwargs)
def get_form_params():
"""Stringify GET request parameters."""
data = frappe._dict(frappe.local.form_dict)
del data["cmd"]
if isinstance(data.get("filters"), basestring):
data["filters"] = json.loads(data["filters"])
if isinstance(data.get("fields"), basestring):
data["fields"] = json.loads(data["fields"])
if isinstance(data.get("docstatus"), basestring):
data["docstatus"] = json.loads(data["docstatus"])
if isinstance(data.get("save_user_settings"), basestring):
data["save_user_settings"] = json.loads(data["save_user_settings"])
else:
data["save_user_settings"] = True
# queries must always be server side
data.query = None
return data
def compress(data, args = {}):
"""separate keys and values"""
from frappe.desk.query_report import add_total_row
if not data: return data
values = []
keys = data[0].keys()
for row in data:
new_row = []
for key in keys:
new_row.append(row[key])
values.append(new_row)
if args.get("add_total_row"):
meta = frappe.get_meta(args.doctype)
values = add_total_row(values, keys, meta)
return {
"keys": keys,
"values": values
}
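# Illustrative result shape for compress() (hypothetical rows):
#   {"keys": ["name", "status"],
#    "values": [["TASK-0001", "Open"], ["TASK-0002", "Closed"]]}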
@frappe.whitelist()
def save_report():
"""save report"""
data = frappe.local.form_dict
if frappe.db.exists('Report', data['name']):
d = frappe.get_doc('Report', data['name'])
else:
d = frappe.new_doc('Report')
d.report_name = data['name']
d.ref_doctype = data['doctype']
d.report_type = "Report Builder"
d.json = data['json']
frappe.get_doc(d).save()
frappe.msgprint(_("{0} is saved").format(d.name))
return d.name
@frappe.whitelist()
def export_query():
"""export from report builder"""
form_params = get_form_params()
form_params["limit_page_length"] = None
form_params["as_list"] = True
doctype = form_params.doctype
add_totals_row = None
file_format_type = form_params["file_format_type"]
del form_params["doctype"]
del form_params["file_format_type"]
if 'add_totals_row' in form_params and form_params['add_totals_row']=='1':
add_totals_row = 1
del form_params["add_totals_row"]
frappe.permissions.can_export(doctype, raise_exception=True)
db_query = DatabaseQuery(doctype)
ret = db_query.execute(**form_params)
if add_totals_row:
ret = append_totals_row(ret)
data = [['Sr'] + get_labels(db_query.fields, doctype)]
for i, row in enumerate(ret):
data.append([i+1] + list(row))
if file_format_type == "CSV":
# convert to csv
import csv
from cStringIO import StringIO
f = StringIO()
writer = csv.writer(f)
for r in data:
# encode only unicode type strings and not int, floats etc.
writer.writerow(map(lambda v: isinstance(v, unicode) and v.encode('utf-8') or v, r))
f.seek(0)
frappe.response['result'] = unicode(f.read(), 'utf-8')
frappe.response['type'] = 'csv'
frappe.response['doctype'] = doctype
elif file_format_type == "Excel":
from frappe.utils.xlsxutils import make_xlsx
xlsx_file = make_xlsx(data, doctype)
frappe.response['filename'] = doctype + '.xlsx'
frappe.response['filecontent'] = xlsx_file.getvalue()
frappe.response['type'] = 'binary'
def append_totals_row(data):
if not data:
return data
data = list(data)
totals = []
totals.extend([""]*len(data[0]))
for row in data:
for i in xrange(len(row)):
if isinstance(row[i], (float, int)):
totals[i] = (totals[i] or 0) + row[i]
data.append(totals)
return data
def get_labels(fields, doctype):
"""get column labels based on column names"""
labels = []
for key in fields:
key = key.split(" as ")[0]
if "." in key:
parenttype, fieldname = key.split(".")[0][4:-1], key.split(".")[1].strip("`")
else:
parenttype = doctype
            fieldname = key.strip("`")
df = frappe.get_meta(parenttype).get_field(fieldname)
label = df.label if df else fieldname.title()
if label in labels:
label = doctype + ": " + label
labels.append(label)
return labels
@frappe.whitelist()
def delete_items():
"""delete selected items"""
import json
il = json.loads(frappe.form_dict.get('items'))
doctype = frappe.form_dict.get('doctype')
for i, d in enumerate(il):
try:
frappe.delete_doc(doctype, d)
if len(il) >= 5:
frappe.publish_realtime("progress",
dict(progress=[i+1, len(il)], title=_('Deleting {0}').format(doctype)),
user=frappe.session.user)
except Exception:
pass
@frappe.whitelist()
def get_sidebar_stats(stats, doctype, filters=[]):
cat_tags = frappe.db.sql("""select tag.parent as category, tag.tag_name as tag
from `tabTag Doc Category` as docCat
INNER JOIN tabTag as tag on tag.parent = docCat.parent
where docCat.tagdoc=%s
ORDER BY tag.parent asc,tag.idx""",doctype,as_dict=1)
return {"defined_cat":cat_tags, "stats":get_stats(stats, doctype, filters)}
@frappe.whitelist()
def get_stats(stats, doctype, filters=[]):
"""get tag info"""
import json
tags = json.loads(stats)
if filters:
filters = json.loads(filters)
stats = {}
columns = frappe.db.get_table_columns(doctype)
for tag in tags:
if not tag in columns: continue
        tagcount = frappe.get_list(doctype, fields=[tag, "count(*)"],
            filters=filters + ["ifnull(`%s`,'')!=''" % tag], group_by=tag, as_list=True)
if tag=='_user_tags':
stats[tag] = scrub_user_tags(tagcount)
stats[tag].append(["No Tags", frappe.get_list(doctype,
fields=[tag, "count(*)"],
filters=filters +["({0} = ',' or {0} is null)".format(tag)], as_list=True)[0][1]])
else:
stats[tag] = tagcount
return stats
<|fim▁hole|> import json
tags = json.loads(stats)
if filters:
filters = json.loads(filters)
stats = {}
columns = frappe.db.get_table_columns(doctype)
for tag in tags:
if not tag["name"] in columns: continue
tagcount = []
if tag["type"] not in ['Date', 'Datetime']:
tagcount = frappe.get_list(doctype,
fields=[tag["name"], "count(*)"],
filters = filters + ["ifnull(`%s`,'')!=''" % tag["name"]],
group_by = tag["name"],
as_list = True)
if tag["type"] not in ['Check','Select','Date','Datetime','Int',
'Float','Currency','Percent'] and tag['name'] not in ['docstatus']:
stats[tag["name"]] = list(tagcount)
if stats[tag["name"]]:
data =["No Data", frappe.get_list(doctype,
fields=[tag["name"], "count(*)"],
filters=filters + ["({0} = '' or {0} is null)".format(tag["name"])],
as_list=True)[0][1]]
if data and data[1]!=0:
stats[tag["name"]].append(data)
else:
stats[tag["name"]] = tagcount
return stats
def scrub_user_tags(tagcount):
"""rebuild tag list for tags"""
rdict = {}
tagdict = dict(tagcount)
for t in tagdict:
if not t:
continue
alltags = t.split(',')
for tag in alltags:
if tag:
if not tag in rdict:
rdict[tag] = 0
rdict[tag] += tagdict[t]
rlist = []
for tag in rdict:
rlist.append([tag, rdict[tag]])
return rlist
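# For example (illustrative): scrub_user_tags([(',a,b', 2), (',a', 1)]) returns
# [['a', 3], ['b', 2]] (ordering follows dict iteration, so it may vary).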
# used in building query in queries.py
def get_match_cond(doctype):
cond = DatabaseQuery(doctype).build_match_conditions()
return ((' and ' + cond) if cond else "").replace("%", "%%")
def build_match_conditions(doctype, as_condition=True):
match_conditions = DatabaseQuery(doctype).build_match_conditions(as_condition=as_condition)
if as_condition:
return match_conditions.replace("%", "%%")
else:
return match_conditions
def get_filters_cond(doctype, filters, conditions):
if filters:
flt = filters
if isinstance(filters, dict):
filters = filters.items()
flt = []
for f in filters:
if isinstance(f[1], basestring) and f[1][0] == '!':
flt.append([doctype, f[0], '!=', f[1][1:]])
else:
value = frappe.db.escape(f[1]) if isinstance(f[1], basestring) else f[1]
flt.append([doctype, f[0], '=', value])
query = DatabaseQuery(doctype)
query.filters = flt
query.conditions = conditions
query.build_filter_conditions(flt, conditions)
cond = ' and ' + ' and '.join(query.conditions)
else:
cond = ''
return cond<|fim▁end|> | @frappe.whitelist()
def get_filter_dashboard_data(stats, doctype, filters=[]):
"""get tags info""" |
<|file_name|>constellation.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `Constellation`, Servo's Grand Central Station
//!
//! The constellation tracks all information kept globally by the
//! browser engine, which includes:
//!
//! * The set of all `EventLoop` objects. Each event loop is
//! the constellation's view of a script thread. The constellation
//! interacts with a script thread by message-passing.
//!
//! * The set of all `Pipeline` objects. Each pipeline gives the
//! constellation's view of a `Window`, with its script thread and
//! layout threads. Pipelines may share script threads, but not
//! layout threads.
//!
//! * The set of all `Frame` objects. Each frame gives the constellation's
//! view of a browsing context. Each browsing context stores an independent
//! session history, created by navigation of that frame. The session
//! history can be traversed, for example by the back and forwards UI,
//! so each session history maintains a list of past and future pipelines,
//! as well as the current active pipeline.
//!
//! There are two kinds of frames: top-level frames (for example tabs
//! in a browser UI), and nested frames (typically caused by `iframe`
//! elements). Frames have a hierarchy (typically caused by `iframe`s
//! containing `iframe`s), giving rise to a frame tree with a root frame.
//! The logical relationship between these types is:
//!
//! ```
//! +---------+ +------------+ +-------------+
//! | Frame | --parent?--> | Pipeline | --event_loop--> | EventLoop |
//! | | --current--> | | | |
//! | | --prev*----> | | <---pipeline*-- | |
//! | | --next*----> | | +-------------+
//! | | | |
//! | | <----frame-- | |
//! +---------+ +------------+
//! ```
//
//!
//! frames with a parent.
//!
//! The constellation also maintains channels to threads, including:
//!
//! * The script and layout threads.
//! * The graphics compositor.
//! * The font cache, image cache, and resource manager, which load
//! and cache shared fonts, images, or other resources.
//! * The service worker manager.
//! * The devtools, debugger and webdriver servers.
//!
//! The constellation passes messages between the threads, and updates its state
//! to track the evolving state of the frame tree.
//!
//! The constellation acts as a logger, tracking any `warn!` messages from threads,
//! and converting any `error!` or `panic!` into a crash report, which is filed
//! using an appropriate `mozbrowsererror` event.
//!
//! Since there is only one constellation, and its responsibilities include crash reporting,
//! it is very important that it does not panic.
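// Illustrative sketch (not the real definitions; see the `frame` module): the
// session history described above can be pictured as
//
//     struct Frame { prev: Vec<FrameState>, current: FrameState, next: Vec<FrameState> }
//
// where traversing back pops an entry from `prev` into `current` and pushes the
// old `current` onto `next`; a fresh navigation clears `next`.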
use backtrace::Backtrace;
use bluetooth_traits::BluetoothRequest;
use canvas::canvas_paint_thread::CanvasPaintThread;
use canvas::webgl_paint_thread::WebGLPaintThread;
use canvas_traits::CanvasMsg;
use compositing::SendableFrameTree;
use compositing::compositor_thread::CompositorProxy;
use compositing::compositor_thread::Msg as ToCompositorMsg;
use debugger;
use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg};
use euclid::scale_factor::ScaleFactor;
use euclid::size::{Size2D, TypedSize2D};
use event_loop::EventLoop;
use frame::{Frame, FrameChange, FrameState, FrameTreeIterator, FullFrameTreeIterator};
use gfx::font_cache_thread::FontCacheThread;
use gfx_traits::Epoch;
use ipc_channel::{Error as IpcError};
use ipc_channel::ipc::{self, IpcSender, IpcReceiver};
use ipc_channel::router::ROUTER;
use layout_traits::LayoutThreadFactory;
use log::{Log, LogLevel, LogLevelFilter, LogMetadata, LogRecord};
use msg::constellation_msg::{FrameId, FrameType, PipelineId};
use msg::constellation_msg::{Key, KeyModifiers, KeyState};
use msg::constellation_msg::{PipelineNamespace, PipelineNamespaceId, TraversalDirection};
use net_traits::{self, IpcSend, ResourceThreads};
use net_traits::image_cache_thread::ImageCacheThread;
use net_traits::pub_domains::reg_host;
use net_traits::storage_thread::{StorageThreadMsg, StorageType};
use offscreen_gl_context::{GLContextAttributes, GLLimits};
use pipeline::{InitialPipelineState, Pipeline};
use profile_traits::mem;
use profile_traits::time;
use script_traits::{AnimationState, AnimationTickType, CompositorEvent};
use script_traits::{ConstellationControlMsg, ConstellationMsg as FromCompositorMsg, DiscardBrowsingContext};
use script_traits::{DocumentActivity, DocumentState, LayoutControlMsg, LoadData};
use script_traits::{IFrameLoadInfo, IFrameLoadInfoWithData, IFrameSandboxState, TimerEventRequest};
use script_traits::{LayoutMsg as FromLayoutMsg, ScriptMsg as FromScriptMsg, ScriptThreadFactory};
use script_traits::{LogEntry, ServiceWorkerMsg, webdriver_msg};
use script_traits::{MozBrowserErrorType, MozBrowserEvent, WebDriverCommandMsg, WindowSizeData};
use script_traits::{SWManagerMsg, ScopeThings, WindowSizeType};
use script_traits::WebVREventMsg;
use serde::{Deserialize, Serialize};
use servo_config::opts;
use servo_config::prefs::PREFS;
use servo_rand::{Rng, SeedableRng, ServoRng, random};
use servo_remutex::ReentrantMutex;
use servo_url::ServoUrl;
use std::borrow::ToOwned;
use std::collections::{HashMap, VecDeque};
use std::iter::once;
use std::marker::PhantomData;
use std::process;
use std::rc::{Rc, Weak};
use std::sync::Arc;
use std::sync::mpsc::{Receiver, Sender, channel};
use std::thread;
use std::time::Instant;
use style_traits::CSSPixel;
use style_traits::cursor::Cursor;
use style_traits::viewport::ViewportConstraints;
use timer_scheduler::TimerScheduler;
use webrender_traits;
use webvr_traits::WebVRMsg;
/// The `Constellation` itself. In the servo browser, there is one
/// constellation, which maintains all of the browser global data.
/// In embedded applications, there may be more than one constellation,
/// which are independent of each other.
///
/// The constellation may be in a different process from the pipelines,
/// and communicates using IPC.
///
/// It is parameterized over a `LayoutThreadFactory` and a
/// `ScriptThreadFactory` (which in practice are implemented by
/// `LayoutThread` in the `layout` crate, and `ScriptThread` in
/// the `script` crate). Script and layout communicate using a `Message`
/// type.
pub struct Constellation<Message, LTF, STF> {
/// An IPC channel for script threads to send messages to the constellation.
/// This is the script threads' view of `script_receiver`.
script_sender: IpcSender<FromScriptMsg>,
/// A channel for the constellation to receive messages from script threads.
/// This is the constellation's view of `script_sender`.
script_receiver: Receiver<Result<FromScriptMsg, IpcError>>,
/// An IPC channel for layout threads to send messages to the constellation.
/// This is the layout threads' view of `layout_receiver`.
layout_sender: IpcSender<FromLayoutMsg>,
/// A channel for the constellation to receive messages from layout threads.
/// This is the constellation's view of `layout_sender`.
layout_receiver: Receiver<Result<FromLayoutMsg, IpcError>>,
/// A channel for the constellation to receive messages from the compositor thread.
compositor_receiver: Receiver<FromCompositorMsg>,
/// A channel (the implementation of which is port-specific) for the
/// constellation to send messages to the compositor thread.
compositor_proxy: Box<CompositorProxy>,
/// Channels for the constellation to send messages to the public
/// resource-related threads. There are two groups of resource
/// threads: one for public browsing, and one for private
/// browsing.
public_resource_threads: ResourceThreads,
/// Channels for the constellation to send messages to the private
/// resource-related threads. There are two groups of resource
/// threads: one for public browsing, and one for private
/// browsing.
private_resource_threads: ResourceThreads,
/// A channel for the constellation to send messages to the image
/// cache thread.
image_cache_thread: ImageCacheThread,
/// A channel for the constellation to send messages to the font
/// cache thread.
font_cache_thread: FontCacheThread,
/// A channel for the constellation to send messages to the
/// debugger thread.
debugger_chan: Option<debugger::Sender>,
/// A channel for the constellation to send messages to the
/// devtools thread.
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
/// An IPC channel for the constellation to send messages to the
/// bluetooth thread.
bluetooth_thread: IpcSender<BluetoothRequest>,
/// An IPC channel for the constellation to send messages to the
/// Service Worker Manager thread.
swmanager_chan: Option<IpcSender<ServiceWorkerMsg>>,
/// An IPC channel for Service Worker Manager threads to send
/// messages to the constellation. This is the SW Manager thread's
/// view of `swmanager_receiver`.
swmanager_sender: IpcSender<SWManagerMsg>,
/// A channel for the constellation to receive messages from the
/// Service Worker Manager thread. This is the constellation's view of
/// `swmanager_sender`.
swmanager_receiver: Receiver<Result<SWManagerMsg, IpcError>>,
/// A channel for the constellation to send messages to the
/// time profiler thread.
time_profiler_chan: time::ProfilerChan,
/// A channel for the constellation to send messages to the
/// memory profiler thread.
mem_profiler_chan: mem::ProfilerChan,
/// A channel for the constellation to send messages to the
/// timer thread.
scheduler_chan: IpcSender<TimerEventRequest>,
/// A channel for the constellation to send messages to the
/// Webrender thread.
webrender_api_sender: webrender_traits::RenderApiSender,
/// The set of all event loops in the browser. We generate a new
/// event loop for each registered domain name (aka eTLD+1) in
/// each top-level frame. We store the event loops in a map
/// indexed by top-level frame id (as a `FrameId`) and registered
/// domain name (as a `String`) to event loops. This double
/// indirection ensures that separate tabs do not share event
/// loops, even if the same domain is loaded in each.
/// It is important that scripts with the same eTLD+1
/// share an event loop, since they can use `document.domain`
/// to become same-origin, at which point they can share DOM objects.
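///
/// Illustrative lookup (mirroring the logic in `new_pipeline` below;
/// `"example.com"` stands in for a registered domain):
///
/// ```ignore
/// let event_loop = event_loops.get(&top_level_frame_id)
///     .and_then(|map| map.get("example.com"))
///     .and_then(|weak| weak.upgrade());
/// ```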
event_loops: HashMap<FrameId, HashMap<String, Weak<EventLoop>>>,
/// The set of all the pipelines in the browser.
/// (See the `pipeline` module for more details.)
pipelines: HashMap<PipelineId, Pipeline>,
/// The set of all the frames in the browser.
frames: HashMap<FrameId, Frame>,
/// When a navigation is performed, we do not immediately update
/// the frame tree, instead we ask the event loop to begin loading
/// the new document, and do not update the frame tree until the
/// document is active. Between starting the load and it activating,
/// we store a `FrameChange` object for the navigation in progress.
pending_frames: Vec<FrameChange>,
/// The root frame.
root_frame_id: FrameId,
/// The currently focused pipeline for key events.
focus_pipeline_id: Option<PipelineId>,
/// Pipeline IDs are namespaced in order to avoid name collisions,
/// and the namespaces are allocated by the constellation.
next_pipeline_namespace_id: PipelineNamespaceId,
/// The size of the top-level window.
window_size: WindowSizeData,
/// Bits of state used to interact with the webdriver implementation
webdriver: WebDriverData,
/// Document states for loaded pipelines (used only when writing screenshots).
document_states: HashMap<PipelineId, DocumentState>,
/// Are we shutting down?
shutting_down: bool,
/// Have we seen any warnings? Hopefully always empty!
/// The buffer contains `(thread_name, reason)` entries.
handled_warnings: VecDeque<(Option<String>, String)>,
/// The random number generator and probability for closing pipelines.
/// This is for testing the hardening of the constellation.
random_pipeline_closure: Option<(ServoRng, f32)>,
/// Phantom data that keeps the Rust type system happy.
phantom: PhantomData<(Message, LTF, STF)>,
/// A channel through which messages can be sent to the webvr thread.
webvr_thread: Option<IpcSender<WebVRMsg>>,
}
/// State needed to construct a constellation.
pub struct InitialConstellationState {
/// A channel through which messages can be sent to the compositor.
pub compositor_proxy: Box<CompositorProxy + Send>,
/// A channel to the debugger, if applicable.
pub debugger_chan: Option<debugger::Sender>,
/// A channel to the developer tools, if applicable.
pub devtools_chan: Option<Sender<DevtoolsControlMsg>>,
/// A channel to the bluetooth thread.
pub bluetooth_thread: IpcSender<BluetoothRequest>,
/// A channel to the image cache thread.
pub image_cache_thread: ImageCacheThread,
/// A channel to the font cache thread.
pub font_cache_thread: FontCacheThread,
/// A channel to the resource threads for public browsing.
pub public_resource_threads: ResourceThreads,
/// A channel to the resource threads for private browsing.
pub private_resource_threads: ResourceThreads,
/// A channel to the time profiler thread.
pub time_profiler_chan: time::ProfilerChan,
/// A channel to the memory profiler thread.
pub mem_profiler_chan: mem::ProfilerChan,
/// Webrender API.
pub webrender_api_sender: webrender_traits::RenderApiSender,
/// Whether the constellation supports the clipboard.
/// TODO: this field is not used, remove it?
pub supports_clipboard: bool,
}
/// Data needed for webdriver
struct WebDriverData {
load_channel: Option<(PipelineId, IpcSender<webdriver_msg::LoadStatus>)>,
resize_channel: Option<IpcSender<WindowSizeData>>,
}
impl WebDriverData {
fn new() -> WebDriverData {
WebDriverData {
load_channel: None,
resize_channel: None,
}
}
}
/// When we are running reftests, we save an image to compare against a reference.
/// This enum gives the possible states of preparing such an image.
#[derive(Debug, PartialEq)]
enum ReadyToSave {
NoRootFrame,
PendingFrames,
WebFontNotLoaded,
DocumentLoading,
EpochMismatch,
PipelineUnknown,
Ready,
}
/// When we are exiting a pipeline, we can either force exiting or not.
/// A normal exit waits for the compositor to update its state before
/// exiting, and delegates layout exit to script. A forced exit does
/// not notify the compositor, and exits layout without involving script.
#[derive(Clone, Copy)]
enum ExitPipelineMode {
Normal,
Force,
}
/// The constellation uses logging to perform crash reporting.
/// The constellation receives all `warn!`, `error!` and `panic!` messages,
/// and generates a crash report when it receives a panic.
/// A logger directed at the constellation from content processes.
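///
/// A minimal sketch of installing it from a content process, using the
/// `log` 0.3-era API that this file's `LogLevelFilter`/`LogRecord` types
/// come from (the setup code is illustrative):
///
/// ```ignore
/// let logger = FromScriptLogger::new(script_to_constellation_chan.clone());
/// log::set_logger(|max_log_level| {
///     max_log_level.set(logger.filter());
///     Box::new(logger)
/// }).expect("Failed to set logger");
/// ```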
#[derive(Clone)]
pub struct FromScriptLogger {
/// A channel to the constellation
pub constellation_chan: Arc<ReentrantMutex<IpcSender<FromScriptMsg>>>,
}
impl FromScriptLogger {
/// Create a new constellation logger.
pub fn new(constellation_chan: IpcSender<FromScriptMsg>) -> FromScriptLogger {
FromScriptLogger {
constellation_chan: Arc::new(ReentrantMutex::new(constellation_chan))
}
}
/// The maximum log level the constellation logger is interested in.
pub fn filter(&self) -> LogLevelFilter {
LogLevelFilter::Warn
}
}
impl Log for FromScriptLogger {
fn enabled(&self, metadata: &LogMetadata) -> bool {
metadata.level() <= LogLevel::Warn
}
fn log(&self, record: &LogRecord) {
if let Some(entry) = log_entry(record) {
debug!("Sending log entry {:?}.", entry);
let top_level_frame_id = FrameId::installed();
let thread_name = thread::current().name().map(ToOwned::to_owned);
let msg = FromScriptMsg::LogEntry(top_level_frame_id, thread_name, entry);
let chan = self.constellation_chan.lock().unwrap_or_else(|err| err.into_inner());
let _ = chan.send(msg);
}
}
}
/// A logger directed at the constellation from the compositor.
#[derive(Clone)]
pub struct FromCompositorLogger {
/// A channel to the constellation
pub constellation_chan: Arc<ReentrantMutex<Sender<FromCompositorMsg>>>,
}
impl FromCompositorLogger {
/// Create a new constellation logger.
pub fn new(constellation_chan: Sender<FromCompositorMsg>) -> FromCompositorLogger {
FromCompositorLogger {
constellation_chan: Arc::new(ReentrantMutex::new(constellation_chan))
}
}
/// The maximum log level the constellation logger is interested in.
pub fn filter(&self) -> LogLevelFilter {
LogLevelFilter::Warn
}
}
impl Log for FromCompositorLogger {
fn enabled(&self, metadata: &LogMetadata) -> bool {
metadata.level() <= LogLevel::Warn
}
fn log(&self, record: &LogRecord) {
if let Some(entry) = log_entry(record) {
debug!("Sending log entry {:?}.", entry);
let top_level_frame_id = FrameId::installed();
let thread_name = thread::current().name().map(ToOwned::to_owned);
let msg = FromCompositorMsg::LogEntry(top_level_frame_id, thread_name, entry);
let chan = self.constellation_chan.lock().unwrap_or_else(|err| err.into_inner());
let _ = chan.send(msg);
}
}
}
/// Rust's `log` crate uses `LogRecord` for storing log messages, but servo
/// converts that to a `LogEntry`. We do this so that we can record panics
/// as well as log messages, and because `LogRecord` does not implement serde
/// (de)serialization, so it cannot be used over an IPC channel.
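///
/// Illustrative mapping (not a doctest):
///
/// ```ignore
/// // warn!("slow frame")               => Some(LogEntry::Warn("slow frame"))
/// // error!("oops") (not panicking)    => Some(LogEntry::Error("oops"))
/// // error!("oops") (while panicking)  => Some(LogEntry::Panic("oops", backtrace))
/// // info!(..) / debug!(..) / trace!(..) => None
/// ```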
fn log_entry(record: &LogRecord) -> Option<LogEntry> {
match record.level() {
LogLevel::Error if thread::panicking() => Some(LogEntry::Panic(
format!("{}", record.args()),
format!("{:?}", Backtrace::new())
)),
LogLevel::Error => Some(LogEntry::Error(
format!("{}", record.args())
)),
LogLevel::Warn => Some(LogEntry::Warn(
format!("{}", record.args())
)),
_ => None,
}
}
/// The number of warnings to include in each crash report.
const WARNINGS_BUFFER_SIZE: usize = 32;
/// Route an ipc receiver to an mpsc receiver, preserving any errors.
/// This is the same as `route_ipc_receiver_to_new_mpsc_receiver`,
/// but does not panic on deserialization errors.
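///
/// Usage sketch (assuming a serializable message type `MyMsg` and a
/// `handle` function; both are illustrative):
///
/// ```ignore
/// let (ipc_sender, ipc_receiver) = ipc::channel::<MyMsg>().expect("ipc channel failure");
/// let receiver = route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(ipc_receiver);
/// match receiver.recv() {
///     Ok(Ok(msg)) => handle(msg),
///     Ok(Err(ipc_err)) => warn!("Deserialization failed ({:?}).", ipc_err),
///     Err(_) => warn!("Sender disconnected."),
/// }
/// ```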
fn route_ipc_receiver_to_new_mpsc_receiver_preserving_errors<T>(ipc_receiver: IpcReceiver<T>)
-> Receiver<Result<T, IpcError>>
where T: Deserialize + Serialize + Send + 'static
{
let (mpsc_sender, mpsc_receiver) = channel();
ROUTER.add_route(ipc_receiver.to_opaque(), Box::new(move |message| {
drop(mpsc_sender.send(message.to::<T>()))
}));
mpsc_receiver
}
impl<Message, LTF, STF> Constellation<Message, LTF, STF>
where LTF: LayoutThreadFactory<Message=Message>,
STF: ScriptThreadFactory<Message=Message>
{
/// Create a new constellation thread.
pub fn start(state: InitialConstellationState) -> (Sender<FromCompositorMsg>, IpcSender<SWManagerMsg>) {
let (compositor_sender, compositor_receiver) = channel();
// A channel for the service worker manager to communicate with the constellation.
let (swmanager_sender, swmanager_receiver) = ipc::channel().expect("ipc channel failure");
let sw_mgr_clone = swmanager_sender.clone();
thread::Builder::new().name("Constellation".to_owned()).spawn(move || {
let (ipc_script_sender, ipc_script_receiver) = ipc::channel().expect("ipc channel failure");
let script_receiver = route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(ipc_script_receiver);
let (ipc_layout_sender, ipc_layout_receiver) = ipc::channel().expect("ipc channel failure");
let layout_receiver = route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(ipc_layout_receiver);
let swmanager_receiver = route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(swmanager_receiver);
PipelineNamespace::install(PipelineNamespaceId(0));
let mut constellation: Constellation<Message, LTF, STF> = Constellation {
script_sender: ipc_script_sender,
layout_sender: ipc_layout_sender,
script_receiver: script_receiver,
compositor_receiver: compositor_receiver,
layout_receiver: layout_receiver,
compositor_proxy: state.compositor_proxy,
debugger_chan: state.debugger_chan,
devtools_chan: state.devtools_chan,
bluetooth_thread: state.bluetooth_thread,
public_resource_threads: state.public_resource_threads,
private_resource_threads: state.private_resource_threads,
image_cache_thread: state.image_cache_thread,
font_cache_thread: state.font_cache_thread,
swmanager_chan: None,
swmanager_receiver: swmanager_receiver,
swmanager_sender: sw_mgr_clone,
event_loops: HashMap::new(),
pipelines: HashMap::new(),
frames: HashMap::new(),
pending_frames: vec!(),
// We initialize the namespace at 1, since we reserved namespace 0 for the constellation
next_pipeline_namespace_id: PipelineNamespaceId(1),
root_frame_id: FrameId::new(),
focus_pipeline_id: None,
time_profiler_chan: state.time_profiler_chan,
mem_profiler_chan: state.mem_profiler_chan,
window_size: WindowSizeData {
initial_viewport: opts::get().initial_window_size.to_f32() *
ScaleFactor::new(1.0),
device_pixel_ratio:
ScaleFactor::new(opts::get().device_pixels_per_px.unwrap_or(1.0)),
},
phantom: PhantomData,
webdriver: WebDriverData::new(),
scheduler_chan: TimerScheduler::start(),
document_states: HashMap::new(),
webrender_api_sender: state.webrender_api_sender,
shutting_down: false,
handled_warnings: VecDeque::new(),
random_pipeline_closure: opts::get().random_pipeline_closure_probability.map(|prob| {
let seed = opts::get().random_pipeline_closure_seed.unwrap_or_else(random);
let rng = ServoRng::from_seed(&[seed]);
warn!("Randomly closing pipelines.");
info!("Using seed {} for random pipeline closure.", seed);
(rng, prob)
}),
webvr_thread: None
};
constellation.run();
}).expect("Thread spawning failed");
(compositor_sender, swmanager_sender)
}
/// The main event loop for the constellation.
fn run(&mut self) {
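// Keep processing messages until shutdown has been requested *and*
// every pipeline has finished exiting.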
while !self.shutting_down || !self.pipelines.is_empty() {
// Randomly close a pipeline if --random-pipeline-closure-probability is set
// This is for testing the hardening of the constellation.
self.maybe_close_random_pipeline();
self.handle_request();
}
self.handle_shutdown();
}
/// Generate a new pipeline id namespace.
fn next_pipeline_namespace_id(&mut self) -> PipelineNamespaceId {
let namespace_id = self.next_pipeline_namespace_id;
let PipelineNamespaceId(ref mut i) = self.next_pipeline_namespace_id;
*i += 1;
namespace_id
}<|fim▁hole|> /// Helper function for creating a pipeline
fn new_pipeline(&mut self,
pipeline_id: PipelineId,
frame_id: FrameId,
parent_info: Option<(PipelineId, FrameType)>,
initial_window_size: Option<TypedSize2D<f32, CSSPixel>>,
load_data: LoadData,
sandbox: IFrameSandboxState,
is_private: bool) {
if self.shutting_down { return; }
// TODO: can we get a case where the child pipeline is created
// before the parent is part of the frame tree?
let top_level_frame_id = match parent_info {
Some((_, FrameType::MozBrowserIFrame)) => frame_id,
Some((parent_id, _)) => self.get_top_level_frame_for_pipeline(parent_id),
None => self.root_frame_id,
};
let (event_loop, host) = match sandbox {
IFrameSandboxState::IFrameSandboxed => (None, None),
IFrameSandboxState::IFrameUnsandboxed => match reg_host(&load_data.url) {
None => (None, None),
Some(host) => {
let event_loop = self.event_loops.get(&top_level_frame_id)
.and_then(|map| map.get(host))
.and_then(|weak| weak.upgrade());
match event_loop {
None => (None, Some(String::from(host))),
Some(event_loop) => (Some(event_loop), None),
}
},
},
};
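// `host` is `Some` only when a new event loop must be created for this
// registered domain; it is used below to register the freshly spawned
// pipeline's event loop under that host.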
let resource_threads = if is_private {
self.private_resource_threads.clone()
} else {
self.public_resource_threads.clone()
};
let parent_visibility = parent_info
.and_then(|(parent_pipeline_id, _)| self.pipelines.get(&parent_pipeline_id))
.map(|pipeline| pipeline.visible);
let prev_visibility = self.frames.get(&frame_id)
.and_then(|frame| self.pipelines.get(&frame.pipeline_id))
.map(|pipeline| pipeline.visible)
.or(parent_visibility);
let result = Pipeline::spawn::<Message, LTF, STF>(InitialPipelineState {
id: pipeline_id,
frame_id: frame_id,
top_level_frame_id: top_level_frame_id,
parent_info: parent_info,
constellation_chan: self.script_sender.clone(),
layout_to_constellation_chan: self.layout_sender.clone(),
scheduler_chan: self.scheduler_chan.clone(),
compositor_proxy: self.compositor_proxy.clone_compositor_proxy(),
devtools_chan: self.devtools_chan.clone(),
bluetooth_thread: self.bluetooth_thread.clone(),
swmanager_thread: self.swmanager_sender.clone(),
image_cache_thread: self.image_cache_thread.clone(),
font_cache_thread: self.font_cache_thread.clone(),
resource_threads: resource_threads,
time_profiler_chan: self.time_profiler_chan.clone(),
mem_profiler_chan: self.mem_profiler_chan.clone(),
window_size: initial_window_size,
event_loop: event_loop,
load_data: load_data,
device_pixel_ratio: self.window_size.device_pixel_ratio,
pipeline_namespace_id: self.next_pipeline_namespace_id(),
prev_visibility: prev_visibility,
webrender_api_sender: self.webrender_api_sender.clone(),
is_private: is_private,
webvr_thread: self.webvr_thread.clone()
});
let pipeline = match result {
Ok(result) => result,
Err(e) => return self.handle_send_error(pipeline_id, e),
};
if let Some(host) = host {
self.event_loops.entry(top_level_frame_id)
.or_insert_with(HashMap::new)
.insert(host, Rc::downgrade(&pipeline.event_loop));
}
assert!(!self.pipelines.contains_key(&pipeline_id));
self.pipelines.insert(pipeline_id, pipeline);
}
/// Get an iterator for the current frame tree. Specify self.root_frame_id to
/// iterate the entire tree, or a specific frame id to iterate only that sub-tree.
/// Iterates over the fully active frames in the tree.
fn current_frame_tree_iter(&self, frame_id_root: FrameId) -> FrameTreeIterator {
FrameTreeIterator {
stack: vec!(frame_id_root),
pipelines: &self.pipelines,
frames: &self.frames,
}
}
/// Get an iterator for the current frame tree. Specify self.root_frame_id to
/// iterate the entire tree, or a specific frame id to iterate only that sub-tree.
/// Iterates over all frames in the tree.
fn full_frame_tree_iter(&self, frame_id_root: FrameId) -> FullFrameTreeIterator {
FullFrameTreeIterator {
stack: vec!(frame_id_root),
pipelines: &self.pipelines,
frames: &self.frames,
}
}
/// The joint session future is the merge of the session future of every
/// frame in the frame tree, sorted chronologically.
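///
/// Illustrative: if frame A's future holds entries navigated to at
/// instants `t1 < t3`, and frame B's holds one at `t2`, the joint future
/// yields them in the order `t1, t2, t3`.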
fn joint_session_future(&self, frame_id_root: FrameId) -> impl Iterator<Item=FrameState> {
let mut future: Vec<FrameState> = self.full_frame_tree_iter(frame_id_root)
.flat_map(|frame| frame.next.iter().cloned())
.collect();
// Sort the joint session future by the timestamp that the pipeline was navigated to
// in chronological order
future.sort_by(|a, b| a.instant.cmp(&b.instant));
future.into_iter()
}
/// Is the joint session future empty?
fn joint_session_future_is_empty(&self, frame_id_root: FrameId) -> bool {
self.full_frame_tree_iter(frame_id_root)
.all(|frame| frame.next.is_empty())
}
/// The joint session past is the merge of the session past of every
/// frame in the frame tree, sorted reverse chronologically.
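///
/// Each entry is keyed by the instant it was navigated *away from* (i.e.
/// the instant of the entry that replaced it), which is what the `scan`
/// below computes. Illustrative: an entry created at `t1` and replaced at
/// `t2` sorts by `t2` in the reverse-chronological merge.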
fn joint_session_past(&self, frame_id_root: FrameId) -> impl Iterator<Item=FrameState> {
let mut past: Vec<(Instant, FrameState)> = self.full_frame_tree_iter(frame_id_root)
.flat_map(|frame| frame.prev.iter().rev().scan(frame.instant, |prev_instant, entry| {
let instant = *prev_instant;
*prev_instant = entry.instant;
Some((instant, entry.clone()))
}))
.collect();
// Sort the joint session past by the timestamp that the pipeline was navigated from
// in reverse chronological order
past.sort_by(|a, b| b.0.cmp(&a.0));
past.into_iter().map(|(_, entry)| entry)
}
/// Is the joint session past empty?
fn joint_session_past_is_empty(&self, frame_id_root: FrameId) -> bool {
self.full_frame_tree_iter(frame_id_root)
.all(|frame| frame.prev.is_empty())
}
/// Create a new frame and update the internal bookkeeping.
fn new_frame(&mut self, frame_id: FrameId, pipeline_id: PipelineId, url: ServoUrl) {
let frame = Frame::new(frame_id, pipeline_id, url);
self.frames.insert(frame_id, frame);
// If a child frame, add it to the parent pipeline.
let parent_info = self.pipelines.get(&pipeline_id)
.and_then(|pipeline| pipeline.parent_info);
if let Some((parent_id, _)) = parent_info {
if let Some(parent) = self.pipelines.get_mut(&parent_id) {
parent.add_child(frame_id);
}
}
}
/// Handles loading pages, navigation, and granting access to the compositor
#[allow(unsafe_code)]
fn handle_request(&mut self) {
enum Request {
Script(FromScriptMsg),
Compositor(FromCompositorMsg),
Layout(FromLayoutMsg),
FromSWManager(SWManagerMsg),
}
// Get one incoming request.
// This is one of the few places where the constellation is
// allowed to panic. If one of the receiver.recv() calls
// fails, it is because the matching sender has been
// reclaimed, but this can't happen in normal execution
// because the constellation keeps a pointer to the sender,
// so it should never be reclaimed. A possible scenario in
// which receiver.recv() fails is if some unsafe code
// produces undefined behaviour, resulting in the destructor
// being called. If this happens, there's not much we can do
// other than panic.
let request = {
let receiver_from_script = &self.script_receiver;
let receiver_from_compositor = &self.compositor_receiver;
let receiver_from_layout = &self.layout_receiver;
let receiver_from_swmanager = &self.swmanager_receiver;
select! {
msg = receiver_from_script.recv() =>
msg.expect("Unexpected script channel panic in constellation").map(Request::Script),
msg = receiver_from_compositor.recv() =>
Ok(Request::Compositor(msg.expect("Unexpected compositor channel panic in constellation"))),
msg = receiver_from_layout.recv() =>
msg.expect("Unexpected layout channel panic in constellation").map(Request::Layout),
msg = receiver_from_swmanager.recv() =>
msg.expect("Unexpected panic channel panic in constellation").map(Request::FromSWManager)
}
};
let request = match request {
Ok(request) => request,
Err(err) => {
// Treat deserialization error the same as receiving a panic message
debug!("Deserialization failed ({:?}).", err);
let reason = format!("Deserialization failed ({})", err);
let root_frame_id = self.root_frame_id;
return self.handle_panic(root_frame_id, reason, None);
}
};
match request {
Request::Compositor(message) => {
self.handle_request_from_compositor(message)
},
Request::Script(message) => {
self.handle_request_from_script(message);
},
Request::Layout(message) => {
self.handle_request_from_layout(message);
},
Request::FromSWManager(message) => {
self.handle_request_from_swmanager(message);
}
}
}
fn handle_request_from_swmanager(&mut self, message: SWManagerMsg) {
match message {
SWManagerMsg::OwnSender(sw_sender) => {
// store service worker manager for communicating with it.
self.swmanager_chan = Some(sw_sender);
}
}
}
fn handle_request_from_compositor(&mut self, message: FromCompositorMsg) {
match message {
FromCompositorMsg::Exit => {
debug!("constellation exiting");
self.handle_exit();
}
FromCompositorMsg::GetFrame(pipeline_id, resp_chan) => {
debug!("constellation got get root pipeline message");
self.handle_get_frame(pipeline_id, resp_chan);
}
FromCompositorMsg::GetPipeline(frame_id, resp_chan) => {
debug!("constellation got get root pipeline message");
self.handle_get_pipeline(frame_id, resp_chan);
}
FromCompositorMsg::GetPipelineTitle(pipeline_id) => {
debug!("constellation got get-pipeline-title message");
self.handle_get_pipeline_title_msg(pipeline_id);
}
FromCompositorMsg::KeyEvent(ch, key, state, modifiers) => {
debug!("constellation got key event message");
self.handle_key_msg(ch, key, state, modifiers);
}
// Load a new page from a typed url.
// If there is already a pending page (self.pending_frames), it will not be overridden;
// however, if the id is not encompassed by another change, it will be.
FromCompositorMsg::LoadUrl(source_id, load_data) => {
debug!("constellation got URL load message from compositor");
self.handle_load_url_msg(source_id, load_data, false);
}
FromCompositorMsg::IsReadyToSaveImage(pipeline_states) => {
let is_ready = self.handle_is_ready_to_save_image(pipeline_states);
debug!("Ready to save image {:?}.", is_ready);
if opts::get().is_running_problem_test {
println!("got ready to save image query, result is {:?}", is_ready);
}
let is_ready = is_ready == ReadyToSave::Ready;
self.compositor_proxy.send(ToCompositorMsg::IsReadyToSaveImageReply(is_ready));
if opts::get().is_running_problem_test {
println!("sent response");
}
}
// This should only be called once per constellation, and only by the browser
FromCompositorMsg::InitLoadUrl(url) => {
debug!("constellation got init load URL message");
self.handle_init_load(url);
}
// Handle a forward or back request
FromCompositorMsg::TraverseHistory(pipeline_id, direction) => {
debug!("constellation got traverse history message from compositor");
self.handle_traverse_history_msg(pipeline_id, direction);
}
FromCompositorMsg::WindowSize(new_size, size_type) => {
debug!("constellation got window resize message");
self.handle_window_size_msg(new_size, size_type);
}
FromCompositorMsg::TickAnimation(pipeline_id, tick_type) => {
self.handle_tick_animation(pipeline_id, tick_type)
}
FromCompositorMsg::WebDriverCommand(command) => {
debug!("constellation got webdriver command message");
self.handle_webdriver_msg(command);
}
FromCompositorMsg::Reload => {
debug!("constellation got reload message");
self.handle_reload_msg();
}
FromCompositorMsg::LogEntry(top_level_frame_id, thread_name, entry) => {
self.handle_log_entry(top_level_frame_id, thread_name, entry);
}
FromCompositorMsg::SetWebVRThread(webvr_thread) => {
assert!(self.webvr_thread.is_none());
self.webvr_thread = Some(webvr_thread)
}
FromCompositorMsg::WebVREvent(pipeline_ids, event) => {
debug!("constellation got WebVR event");
self.handle_webvr_event(pipeline_ids, event);
}
}
}
fn handle_request_from_script(&mut self, message: FromScriptMsg) {
match message {
FromScriptMsg::PipelineExited(pipeline_id) => {
self.handle_pipeline_exited(pipeline_id);
}
FromScriptMsg::ScriptLoadedURLInIFrame(load_info) => {
debug!("constellation got iframe URL load message {:?} {:?} {:?}",
load_info.info.parent_pipeline_id,
load_info.old_pipeline_id,
load_info.info.new_pipeline_id);
self.handle_script_loaded_url_in_iframe_msg(load_info);
}
FromScriptMsg::ScriptLoadedAboutBlankInIFrame(load_info, layout_sender) => {
debug!("constellation got loaded `about:blank` in iframe message {:?} {:?}",
load_info.parent_pipeline_id,
load_info.new_pipeline_id);
self.handle_script_loaded_about_blank_in_iframe_msg(load_info, layout_sender);
}
FromScriptMsg::ChangeRunningAnimationsState(pipeline_id, animation_state) => {
self.handle_change_running_animations_state(pipeline_id, animation_state)
}
// Load a new page from a mouse click.
// If there is already a pending page (self.pending_frames), it will not be overridden;
// however, if the id is not encompassed by another change, it will be.
FromScriptMsg::LoadUrl(source_id, load_data, replace) => {
debug!("constellation got URL load message from script");
self.handle_load_url_msg(source_id, load_data, replace);
}
// A page load has completed: all parsing and script execution is done, and all reflow messages have been sent.
FromScriptMsg::LoadComplete(pipeline_id) => {
debug!("constellation got load complete message");
self.handle_load_complete_msg(pipeline_id)
}
// Handle a forward or back request
FromScriptMsg::TraverseHistory(pipeline_id, direction) => {
debug!("constellation got traverse history message from script");
self.handle_traverse_history_msg(pipeline_id, direction);
}
// Handle a joint session history length request.
FromScriptMsg::JointSessionHistoryLength(pipeline_id, sender) => {
debug!("constellation got joint session history length message from script");
self.handle_joint_session_history_length(pipeline_id, sender);
}
// Notification that the new document is ready to become active
FromScriptMsg::ActivateDocument(pipeline_id) => {
debug!("constellation got activate document message");
self.handle_activate_document_msg(pipeline_id);
}
// Update pipeline url after redirections
FromScriptMsg::SetFinalUrl(pipeline_id, final_url) => {
// The script may have finished loading after we already started shutting down.
if let Some(ref mut pipeline) = self.pipelines.get_mut(&pipeline_id) {
debug!("constellation got set final url message");
pipeline.url = final_url;
} else {
warn!("constellation got set final url message for dead pipeline");
}
}
FromScriptMsg::MozBrowserEvent(parent_pipeline_id, pipeline_id, event) => {
debug!("constellation got mozbrowser event message");
self.handle_mozbrowser_event_msg(parent_pipeline_id,
pipeline_id,
event);
}
FromScriptMsg::Focus(pipeline_id) => {
debug!("constellation got focus message");
self.handle_focus_msg(pipeline_id);
}
FromScriptMsg::ForwardEvent(pipeline_id, event) => {
let msg = ConstellationControlMsg::SendEvent(pipeline_id, event);
let result = match self.pipelines.get(&pipeline_id) {
None => { debug!("Pipeline {:?} got event after closure.", pipeline_id); return; }
Some(pipeline) => pipeline.event_loop.send(msg),
};
if let Err(e) = result {
self.handle_send_error(pipeline_id, e);
}
}
FromScriptMsg::GetClipboardContents(sender) => {
if let Err(e) = sender.send("".to_owned()) {
warn!("Failed to send clipboard ({})", e);
}
}
FromScriptMsg::SetClipboardContents(_) => {
}
FromScriptMsg::SetVisible(pipeline_id, visible) => {
debug!("constellation got set visible messsage");
self.handle_set_visible_msg(pipeline_id, visible);
}
FromScriptMsg::VisibilityChangeComplete(pipeline_id, visible) => {
debug!("constellation got set visibility change complete message");
self.handle_visibility_change_complete(pipeline_id, visible);
}
FromScriptMsg::RemoveIFrame(frame_id, sender) => {
debug!("constellation got remove iframe message");
let removed_pipeline_ids = self.handle_remove_iframe_msg(frame_id);
if let Err(e) = sender.send(removed_pipeline_ids) {
warn!("Error replying to remove iframe ({})", e);
}
}
FromScriptMsg::NewFavicon(url) => {
debug!("constellation got new favicon message");
self.compositor_proxy.send(ToCompositorMsg::NewFavicon(url));
}
FromScriptMsg::HeadParsed => {
debug!("constellation got head parsed message");
self.compositor_proxy.send(ToCompositorMsg::HeadParsed);
}
FromScriptMsg::CreateCanvasPaintThread(size, sender) => {
debug!("constellation got create-canvas-paint-thread message");
self.handle_create_canvas_paint_thread_msg(&size, sender)
}
FromScriptMsg::CreateWebGLPaintThread(size, attributes, sender) => {
debug!("constellation got create-WebGL-paint-thread message");
self.handle_create_webgl_paint_thread_msg(&size, attributes, sender)
}
FromScriptMsg::NodeStatus(message) => {
debug!("constellation got NodeStatus message");
self.compositor_proxy.send(ToCompositorMsg::Status(message));
}
FromScriptMsg::SetDocumentState(pipeline_id, state) => {
debug!("constellation got SetDocumentState message");
self.document_states.insert(pipeline_id, state);
}
FromScriptMsg::Alert(pipeline_id, message, sender) => {
debug!("constellation got Alert message");
self.handle_alert(pipeline_id, message, sender);
}
FromScriptMsg::ScrollFragmentPoint(pipeline_id, scroll_root_id, point, smooth) => {
self.compositor_proxy.send(ToCompositorMsg::ScrollFragmentPoint(pipeline_id,
scroll_root_id,
point,
smooth));
}
FromScriptMsg::GetClientWindow(send) => {
self.compositor_proxy.send(ToCompositorMsg::GetClientWindow(send));
}
FromScriptMsg::MoveTo(point) => {
self.compositor_proxy.send(ToCompositorMsg::MoveTo(point));
}
FromScriptMsg::ResizeTo(size) => {
self.compositor_proxy.send(ToCompositorMsg::ResizeTo(size));
}
FromScriptMsg::Exit => {
self.compositor_proxy.send(ToCompositorMsg::Exit);
}
FromScriptMsg::LogEntry(top_level_frame_id, thread_name, entry) => {
self.handle_log_entry(top_level_frame_id, thread_name, entry);
}
FromScriptMsg::SetTitle(pipeline_id, title) => {
self.compositor_proxy.send(ToCompositorMsg::ChangePageTitle(pipeline_id, title))
}
FromScriptMsg::SendKeyEvent(ch, key, key_state, key_modifiers) => {
self.compositor_proxy.send(ToCompositorMsg::KeyEvent(ch, key, key_state, key_modifiers))
}
FromScriptMsg::TouchEventProcessed(result) => {
self.compositor_proxy.send(ToCompositorMsg::TouchEventProcessed(result))
}
FromScriptMsg::RegisterServiceWorker(scope_things, scope) => {
debug!("constellation got store registration scope message");
self.handle_register_serviceworker(scope_things, scope);
}
FromScriptMsg::ForwardDOMMessage(msg_vec, scope_url) => {
if let Some(ref mgr) = self.swmanager_chan {
let _ = mgr.send(ServiceWorkerMsg::ForwardDOMMessage(msg_vec, scope_url));
} else {
warn!("Unable to forward DOMMessage for postMessage call");
}
}
FromScriptMsg::BroadcastStorageEvent(pipeline_id, storage, url, key, old_value, new_value) => {
self.handle_broadcast_storage_event(pipeline_id, storage, url, key, old_value, new_value);
}
FromScriptMsg::SetFullscreenState(state) => {
self.compositor_proxy.send(ToCompositorMsg::SetFullscreenState(state));
}
}
}
fn handle_request_from_layout(&mut self, message: FromLayoutMsg) {
match message {
FromLayoutMsg::ChangeRunningAnimationsState(pipeline_id, animation_state) => {
self.handle_change_running_animations_state(pipeline_id, animation_state)
}
// Layout sends new sizes for all subframes. This needs to be reflected by all
// frame trees in the navigation context containing the subframe.
FromLayoutMsg::FrameSizes(iframe_sizes) => {
debug!("constellation got frame size message");
self.handle_frame_size_msg(iframe_sizes);
}
FromLayoutMsg::SetCursor(cursor) => {
self.handle_set_cursor_msg(cursor)
}
FromLayoutMsg::ViewportConstrained(pipeline_id, constraints) => {
debug!("constellation got viewport-constrained event message");
self.handle_viewport_constrained_msg(pipeline_id, constraints);
}
}
}
fn handle_register_serviceworker(&self, scope_things: ScopeThings, scope: ServoUrl) {
if let Some(ref mgr) = self.swmanager_chan {
let _ = mgr.send(ServiceWorkerMsg::RegisterServiceWorker(scope_things, scope));
} else {
warn!("sending scope info to service worker manager failed");
}
}
fn handle_broadcast_storage_event(&self, pipeline_id: PipelineId, storage: StorageType, url: ServoUrl,
key: Option<String>, old_value: Option<String>, new_value: Option<String>) {
let origin = url.origin();
for pipeline in self.pipelines.values() {
if (pipeline.id != pipeline_id) && (pipeline.url.origin() == origin) {
let msg = ConstellationControlMsg::DispatchStorageEvent(
pipeline.id, storage, url.clone(), key.clone(), old_value.clone(), new_value.clone()
);
if let Err(err) = pipeline.event_loop.send(msg) {
warn!("Failed to broadcast storage event to pipeline {} ({:?}).", pipeline.id, err);
}
}
}
}
fn handle_exit(&mut self) {
// TODO: add a timer, which forces shutdown if threads aren't responsive.
if self.shutting_down { return; }
self.shutting_down = true;
self.mem_profiler_chan.send(mem::ProfilerMsg::Exit);
// TODO: exit before the root frame is initialized?
debug!("Removing root frame.");
let root_frame_id = self.root_frame_id;
self.close_frame(root_frame_id, ExitPipelineMode::Normal);
// Close any pending frames and pipelines
while let Some(pending) = self.pending_frames.pop() {
debug!("Removing pending frame {}.", pending.frame_id);
self.close_frame(pending.frame_id, ExitPipelineMode::Normal);
debug!("Removing pending pipeline {}.", pending.new_pipeline_id);
self.close_pipeline(pending.new_pipeline_id, DiscardBrowsingContext::Yes, ExitPipelineMode::Normal);
}
// In case there are frames which weren't attached to the frame tree, we close them.
let frame_ids: Vec<FrameId> = self.frames.keys().cloned().collect();
for frame_id in frame_ids {
debug!("Removing detached frame {}.", frame_id);
self.close_frame(frame_id, ExitPipelineMode::Normal);
}
// In case there are pipelines which weren't attached to the pipeline tree, we close them.
let pipeline_ids: Vec<PipelineId> = self.pipelines.keys().cloned().collect();
for pipeline_id in pipeline_ids {
debug!("Removing detached pipeline {}.", pipeline_id);
self.close_pipeline(pipeline_id, DiscardBrowsingContext::Yes, ExitPipelineMode::Normal);
}
}
fn handle_shutdown(&mut self) {
// At this point, there are no active pipelines,
// so we can safely block on other threads, without worrying about deadlock.
// Channels to receive signals when threads are done exiting.
let (core_sender, core_receiver) = ipc::channel().expect("Failed to create IPC channel!");
let (storage_sender, storage_receiver) = ipc::channel().expect("Failed to create IPC channel!");
debug!("Exiting image cache.");
self.image_cache_thread.exit();
debug!("Exiting core resource threads.");
if let Err(e) = self.public_resource_threads.send(net_traits::CoreResourceMsg::Exit(core_sender)) {
warn!("Exit resource thread failed ({})", e);
}
if let Some(ref chan) = self.debugger_chan {
debugger::shutdown_server(chan);
}
if let Some(ref chan) = self.devtools_chan {
debug!("Exiting devtools.");
let msg = DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::ServerExitMsg);
if let Err(e) = chan.send(msg) {
warn!("Exit devtools failed ({})", e);
}
}
debug!("Exiting storage resource threads.");
if let Err(e) = self.public_resource_threads.send(StorageThreadMsg::Exit(storage_sender)) {
warn!("Exit storage thread failed ({})", e);
}
debug!("Exiting bluetooth thread.");
if let Err(e) = self.bluetooth_thread.send(BluetoothRequest::Exit) {
warn!("Exit bluetooth thread failed ({})", e);
}
debug!("Exiting service worker manager thread.");
if let Some(mgr) = self.swmanager_chan.as_ref() {
if let Err(e) = mgr.send(ServiceWorkerMsg::Exit) {
warn!("Exit service worker manager failed ({})", e);
}
}
if let Some(chan) = self.webvr_thread.as_ref() {
debug!("Exiting WebVR thread.");
if let Err(e) = chan.send(WebVRMsg::Exit) {
warn!("Exit WebVR thread failed ({})", e);
}
}
debug!("Exiting font cache thread.");
self.font_cache_thread.exit();
// Receive exit signals from threads.
if let Err(e) = core_receiver.recv() {
warn!("Exit resource thread failed ({})", e);
}
if let Err(e) = storage_receiver.recv() {
warn!("Exit storage thread failed ({})", e);
}
debug!("Asking compositor to complete shutdown.");
self.compositor_proxy.send(ToCompositorMsg::ShutdownComplete);
}
fn handle_pipeline_exited(&mut self, pipeline_id: PipelineId) {
debug!("Pipeline {:?} exited.", pipeline_id);
self.pipelines.remove(&pipeline_id);
}
fn handle_send_error(&mut self, pipeline_id: PipelineId, err: IpcError) {
// Treat send error the same as receiving a panic message
debug!("Pipeline {:?} send error ({}).", pipeline_id, err);
let top_level_frame_id = self.get_top_level_frame_for_pipeline(pipeline_id);
let reason = format!("Send failed ({})", err);
self.handle_panic(top_level_frame_id, reason, None);
}
fn handle_panic(&mut self, top_level_frame_id: FrameId, reason: String, backtrace: Option<String>) {
if opts::get().hard_fail {
// It's quite difficult to make Servo exit cleanly if some threads have failed.
// Hard fail exists for test runners so we crash and that's good enough.
println!("Pipeline failed in hard-fail mode. Crashing!");
process::exit(1);
}
debug!("Panic handler for top-level frame {}: {}.", top_level_frame_id, reason);
// Notify the browser chrome that the pipeline has failed
self.trigger_mozbrowsererror(top_level_frame_id, reason, backtrace);
let pipeline_id = self.frames.get(&top_level_frame_id).map(|frame| frame.pipeline_id);
let pipeline_url = pipeline_id.and_then(|id| self.pipelines.get(&id).map(|pipeline| pipeline.url.clone()));
let parent_info = pipeline_id.and_then(|id| self.pipelines.get(&id).and_then(|pipeline| pipeline.parent_info));
let window_size = pipeline_id.and_then(|id| self.pipelines.get(&id).and_then(|pipeline| pipeline.size));
self.close_frame_children(top_level_frame_id, DiscardBrowsingContext::No, ExitPipelineMode::Force);
let failure_url = ServoUrl::parse("about:failure").expect("infallible");
if let Some(pipeline_url) = pipeline_url {
if pipeline_url == failure_url {
return error!("about:failure failed");
}
}
warn!("creating replacement pipeline for about:failure");
let new_pipeline_id = PipelineId::new();
let load_data = LoadData::new(failure_url.clone(), None, None);
let sandbox = IFrameSandboxState::IFrameSandboxed;
self.new_pipeline(new_pipeline_id, top_level_frame_id, parent_info, window_size, load_data, sandbox, false);
self.pending_frames.push(FrameChange {
frame_id: top_level_frame_id,
old_pipeline_id: pipeline_id,
new_pipeline_id: new_pipeline_id,
url: failure_url,
replace: None,
});
}
fn handle_log_entry(&mut self, top_level_frame_id: Option<FrameId>, thread_name: Option<String>, entry: LogEntry) {
debug!("Received log entry {:?}.", entry);
match entry {
LogEntry::Panic(reason, backtrace) => {
let top_level_frame_id = top_level_frame_id.unwrap_or(self.root_frame_id);
self.handle_panic(top_level_frame_id, reason, Some(backtrace));
},
LogEntry::Error(reason) | LogEntry::Warn(reason) => {
// VecDeque::truncate is unstable
if WARNINGS_BUFFER_SIZE <= self.handled_warnings.len() {
self.handled_warnings.pop_front();
}
self.handled_warnings.push_back((thread_name, reason));
},
}
}
fn handle_webvr_event(&mut self, ids: Vec<PipelineId>, event: WebVREventMsg) {
for id in ids {
match self.pipelines.get_mut(&id) {
Some(ref pipeline) => {
// Notify script thread
let _ = pipeline.event_loop.send(ConstellationControlMsg::WebVREvent(id, event.clone()));
},
None => warn!("constellation got webvr event for dead pipeline")
}
}
}
fn handle_init_load(&mut self, url: ServoUrl) {
let window_size = self.window_size.initial_viewport;
let root_pipeline_id = PipelineId::new();
let root_frame_id = self.root_frame_id;
let load_data = LoadData::new(url.clone(), None, None);
let sandbox = IFrameSandboxState::IFrameUnsandboxed;
self.new_pipeline(root_pipeline_id, root_frame_id, None, Some(window_size), load_data, sandbox, false);
self.handle_load_start_msg(root_pipeline_id);
self.pending_frames.push(FrameChange {
frame_id: self.root_frame_id,
old_pipeline_id: None,
new_pipeline_id: root_pipeline_id,
url: url.clone(),
replace: None,
});
self.compositor_proxy.send(ToCompositorMsg::ChangePageUrl(root_pipeline_id, url));
}
fn handle_frame_size_msg(&mut self,
iframe_sizes: Vec<(PipelineId, TypedSize2D<f32, CSSPixel>)>) {
for (pipeline_id, size) in iframe_sizes {
let result = {
let pipeline = match self.pipelines.get_mut(&pipeline_id) {
Some(pipeline) => pipeline,
None => continue,
};
if pipeline.size == Some(size) {
continue;
}
pipeline.size = Some(size);
let msg = ConstellationControlMsg::Resize(pipeline_id, WindowSizeData {
initial_viewport: size,
device_pixel_ratio: self.window_size.device_pixel_ratio,
}, WindowSizeType::Initial);
pipeline.event_loop.send(msg)
};
if let Err(e) = result {
self.handle_send_error(pipeline_id, e);
}
}
}
fn handle_subframe_loaded(&mut self, pipeline_id: PipelineId) {
let (frame_id, parent_id) = match self.pipelines.get(&pipeline_id) {
Some(pipeline) => match pipeline.parent_info {
Some((parent_id, _)) => (pipeline.frame_id, parent_id),
None => return warn!("Pipeline {} has no parent.", pipeline_id),
},
None => return warn!("Pipeline {} loaded after closure.", pipeline_id),
};
let msg = ConstellationControlMsg::DispatchFrameLoadEvent {
target: frame_id,
parent: parent_id,
child: pipeline_id,
};
let result = match self.pipelines.get(&parent_id) {
Some(parent) => parent.event_loop.send(msg),
None => return warn!("Parent {} frame loaded after closure.", parent_id),
};
if let Err(e) = result {
self.handle_send_error(parent_id, e);
}
}
// The script thread associated with pipeline_id has loaded a URL in an iframe via script. This
// will result in a new pipeline being spawned and a frame tree being added to
// parent_pipeline_id's frame tree's children. This message is never the result of a
// page navigation.
fn handle_script_loaded_url_in_iframe_msg(&mut self, load_info: IFrameLoadInfoWithData) {
let (load_data, window_size, is_private) = {
let old_pipeline = load_info.old_pipeline_id
.and_then(|old_pipeline_id| self.pipelines.get(&old_pipeline_id));
let source_pipeline = match self.pipelines.get(&load_info.info.parent_pipeline_id) {
Some(source_pipeline) => source_pipeline,
None => return warn!("Script loaded url in closed iframe {}.", load_info.info.parent_pipeline_id),
};
// If no url is specified, reload.
let load_data = load_info.load_data.unwrap_or_else(|| {
let url = match old_pipeline {
Some(old_pipeline) => old_pipeline.url.clone(),
None => ServoUrl::parse("about:blank").expect("infallible"),
};
// TODO - loaddata here should have referrer info (not None, None)
LoadData::new(url, None, None)
});
let is_private = load_info.info.is_private || source_pipeline.is_private;
let window_size = old_pipeline.and_then(|old_pipeline| old_pipeline.size);
(load_data, window_size, is_private)
};
let replace = if load_info.info.replace {
self.frames.get(&load_info.info.frame_id).map(|frame| frame.current())
} else {
None
};
// Create the new pipeline, attached to the parent and push to pending frames
self.pending_frames.push(FrameChange {
frame_id: load_info.info.frame_id,
old_pipeline_id: load_info.old_pipeline_id,
new_pipeline_id: load_info.info.new_pipeline_id,
url: load_data.url.clone(),
replace: replace,
});
self.new_pipeline(load_info.info.new_pipeline_id,
load_info.info.frame_id,
Some((load_info.info.parent_pipeline_id, load_info.info.frame_type)),
window_size,
load_data,
load_info.sandbox,
is_private);
}
fn handle_script_loaded_about_blank_in_iframe_msg(&mut self,
load_info: IFrameLoadInfo,
layout_sender: IpcSender<LayoutControlMsg>) {
let IFrameLoadInfo {
parent_pipeline_id,
new_pipeline_id,
frame_type,
replace,
frame_id,
is_private,
} = load_info;
let url = ServoUrl::parse("about:blank").expect("infallible");
let pipeline = {
let parent_pipeline = match self.pipelines.get(&parent_pipeline_id) {
Some(parent_pipeline) => parent_pipeline,
None => return warn!("Script loaded url in closed iframe {}.", parent_pipeline_id),
};
let script_sender = parent_pipeline.event_loop.clone();
Pipeline::new(new_pipeline_id,
frame_id,
Some((parent_pipeline_id, frame_type)),
script_sender,
layout_sender,
self.compositor_proxy.clone_compositor_proxy(),
is_private || parent_pipeline.is_private,
url.clone(),
None,
parent_pipeline.visible)
};
let replace = if replace {
self.frames.get(&frame_id).map(|frame| frame.current())
} else {
None
};
assert!(!self.pipelines.contains_key(&new_pipeline_id));
self.pipelines.insert(new_pipeline_id, pipeline);
self.pending_frames.push(FrameChange {
frame_id: frame_id,
old_pipeline_id: None,
new_pipeline_id: new_pipeline_id,
url: url,
replace: replace,
});
}
fn handle_set_cursor_msg(&mut self, cursor: Cursor) {
self.compositor_proxy.send(ToCompositorMsg::SetCursor(cursor))
}
fn handle_change_running_animations_state(&mut self,
pipeline_id: PipelineId,
animation_state: AnimationState) {
self.compositor_proxy.send(ToCompositorMsg::ChangeRunningAnimationsState(pipeline_id,
animation_state))
}
fn handle_tick_animation(&mut self, pipeline_id: PipelineId, tick_type: AnimationTickType) {
let result = match tick_type {
AnimationTickType::Script => {
let msg = ConstellationControlMsg::TickAllAnimations(pipeline_id);
match self.pipelines.get(&pipeline_id) {
Some(pipeline) => pipeline.event_loop.send(msg),
None => return warn!("Pipeline {:?} got script tick after closure.", pipeline_id),
}
}
AnimationTickType::Layout => {
let msg = LayoutControlMsg::TickAnimations;
match self.pipelines.get(&pipeline_id) {
Some(pipeline) => pipeline.layout_chan.send(msg),
None => return warn!("Pipeline {:?} got layout tick after closure.", pipeline_id),
}
}
};
if let Err(e) = result {
self.handle_send_error(pipeline_id, e);
}
}
fn handle_alert(&mut self,
pipeline_id: PipelineId,
message: String,
sender: IpcSender<bool>) {
let pipeline_isnt_root = self.pipelines.get(&pipeline_id).and_then(|pipeline| pipeline.parent_info).is_some();
let mozbrowser_modal_prompt = pipeline_isnt_root && PREFS.is_mozbrowser_enabled();
if mozbrowser_modal_prompt {
// https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowsershowmodalprompt
let prompt_type = String::from("alert");
let title = String::from("Alert");
let return_value = String::from("");
let event = MozBrowserEvent::ShowModalPrompt(prompt_type, title, message, return_value);
let top_level_frame_id = self.get_top_level_frame_for_pipeline(pipeline_id);
match self.frames.get(&self.root_frame_id) {
None => warn!("Alert sent after root frame closure."),
Some(root_frame) => match self.pipelines.get(&root_frame.pipeline_id) {
None => warn!("Alert sent after root pipeline closure."),
Some(root_pipeline) => root_pipeline.trigger_mozbrowser_event(Some(top_level_frame_id), event),
}
}
}
let result = sender.send(!mozbrowser_modal_prompt);
if let Err(e) = result {
self.handle_send_error(pipeline_id, e);
}
}
fn handle_load_url_msg(&mut self, source_id: PipelineId, load_data: LoadData, replace: bool) {
self.load_url(source_id, load_data, replace);
}
fn load_url(&mut self, source_id: PipelineId, load_data: LoadData, replace: bool) -> Option<PipelineId> {
debug!("Loading {} in pipeline {}.", load_data.url, source_id);
// If this load targets an iframe, its framing element may exist
// in a separate script thread than the framed document that initiated
// the new load. The framing element must be notified about the
// requested change so it can update its internal state.
//
// If replace is true, the current entry is replaced instead of a new entry being added.
let (frame_id, parent_info) = match self.pipelines.get(&source_id) {
Some(pipeline) => (pipeline.frame_id, pipeline.parent_info),
None => {
warn!("Pipeline {:?} loaded after closure.", source_id);
return None;
}
};
match parent_info {
Some((parent_pipeline_id, _)) => {
self.handle_load_start_msg(source_id);
// Message the constellation to find the script thread for this iframe
// and issue an iframe load through there.
let msg = ConstellationControlMsg::Navigate(parent_pipeline_id, frame_id, load_data, replace);
let result = match self.pipelines.get(&parent_pipeline_id) {
Some(parent_pipeline) => parent_pipeline.event_loop.send(msg),
None => {
warn!("Pipeline {:?} child loaded after closure", parent_pipeline_id);
return None;
},
};
if let Err(e) = result {
self.handle_send_error(parent_pipeline_id, e);
}
Some(source_id)
}
None => {
// Make sure no pending page would be overridden.
for frame_change in &self.pending_frames {
if frame_change.old_pipeline_id == Some(source_id) {
// id that sent load msg is being changed already; abort
return None;
}
}
if !self.pipeline_is_in_current_frame(source_id) {
// Disregard this load if the navigating pipeline is not actually
// active. This could be caused by a delayed navigation (eg. from
// a timer) or a race between multiple navigations (such as an
// onclick handler on an anchor element).
return None;
}
self.handle_load_start_msg(source_id);
// Being here means either there are no pending frames, or none of the pending
// changes would be overridden by changing the subframe associated with source_id.
// Create the new pipeline
let window_size = self.pipelines.get(&source_id).and_then(|source| source.size);
let new_pipeline_id = PipelineId::new();
let root_frame_id = self.root_frame_id;
let sandbox = IFrameSandboxState::IFrameUnsandboxed;
let replace = if replace {
self.frames.get(&frame_id).map(|frame| frame.current())
} else {
None
};
self.pending_frames.push(FrameChange {
frame_id: root_frame_id,
old_pipeline_id: Some(source_id),
new_pipeline_id: new_pipeline_id,
url: load_data.url.clone(),
replace: replace,
});
self.new_pipeline(new_pipeline_id, root_frame_id, None, window_size, load_data, sandbox, false);
Some(new_pipeline_id)
}
}
}
fn handle_load_start_msg(&mut self, pipeline_id: PipelineId) {
let frame_id = self.get_top_level_frame_for_pipeline(pipeline_id);
let forward = !self.joint_session_future_is_empty(frame_id);
let back = !self.joint_session_past_is_empty(frame_id);
self.compositor_proxy.send(ToCompositorMsg::LoadStart(back, forward));
}
fn handle_load_complete_msg(&mut self, pipeline_id: PipelineId) {
let mut webdriver_reset = false;
if let Some((expected_pipeline_id, ref reply_chan)) = self.webdriver.load_channel {
debug!("Sending load to WebDriver");
if expected_pipeline_id == pipeline_id {
let _ = reply_chan.send(webdriver_msg::LoadStatus::LoadComplete);
webdriver_reset = true;
}
}
if webdriver_reset {
self.webdriver.load_channel = None;
}
let frame_id = self.get_top_level_frame_for_pipeline(pipeline_id);
let forward = !self.joint_session_future_is_empty(frame_id);
let back = !self.joint_session_past_is_empty(frame_id);
let root = self.root_frame_id == frame_id;
self.compositor_proxy.send(ToCompositorMsg::LoadComplete(back, forward, root));
self.handle_subframe_loaded(pipeline_id);
}
fn handle_traverse_history_msg(&mut self,
pipeline_id: Option<PipelineId>,
direction: TraversalDirection) {
let top_level_frame_id = pipeline_id
.map(|pipeline_id| self.get_top_level_frame_for_pipeline(pipeline_id))
.unwrap_or(self.root_frame_id);
let mut size = 0;
let mut table = HashMap::new();
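// `table` keeps only the last matching entry per frame, so traversing
// several steps within a single frame jumps straight to the final entry.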
match direction {
TraversalDirection::Forward(delta) => {
for entry in self.joint_session_future(top_level_frame_id).take(delta) {
size += 1;
table.insert(entry.frame_id, entry);
}
if size < delta {
return debug!("Traversing forward too much.");
}
},
TraversalDirection::Back(delta) => {
for entry in self.joint_session_past(top_level_frame_id).take(delta) {
size += 1;
table.insert(entry.frame_id, entry);
}
if size < delta {
return debug!("Traversing back too much.");
}
},
}
for (_, entry) in table {
self.traverse_to_entry(entry);
}
}
fn handle_joint_session_history_length(&self, pipeline_id: PipelineId, sender: IpcSender<u32>) {
let frame_id = self.get_top_level_frame_for_pipeline(pipeline_id);
// Initialize length at 1 to account for the current active entry
let mut length = 1;
for frame in self.full_frame_tree_iter(frame_id) {
length += frame.next.len();
length += frame.prev.len();
}
let _ = sender.send(length as u32);
}
fn handle_key_msg(&mut self, ch: Option<char>, key: Key, state: KeyState, mods: KeyModifiers) {
// Send to the explicitly focused pipeline (if it exists), or the root
// frame's current pipeline. If neither exist, fall back to sending to
// the compositor below.
let root_pipeline_id = self.frames.get(&self.root_frame_id)
.map(|root_frame| root_frame.pipeline_id);
let pipeline_id = self.focus_pipeline_id.or(root_pipeline_id);
match pipeline_id {
Some(pipeline_id) => {
let event = CompositorEvent::KeyEvent(ch, key, state, mods);
let msg = ConstellationControlMsg::SendEvent(pipeline_id, event);
let result = match self.pipelines.get(&pipeline_id) {
Some(pipeline) => pipeline.event_loop.send(msg),
None => return debug!("Pipeline {:?} got key event after closure.", pipeline_id),
};
if let Err(e) = result {
self.handle_send_error(pipeline_id, e);
}
},
None => {
let event = ToCompositorMsg::KeyEvent(ch, key, state, mods);
self.compositor_proxy.clone_compositor_proxy().send(event);
}
}
}
fn handle_reload_msg(&mut self) {
// Send Reload constellation msg to root script channel.
let root_pipeline_id = self.frames.get(&self.root_frame_id)
.map(|root_frame| root_frame.pipeline_id);
if let Some(pipeline_id) = root_pipeline_id {
let msg = ConstellationControlMsg::Reload(pipeline_id);
let result = match self.pipelines.get(&pipeline_id) {
Some(pipeline) => pipeline.event_loop.send(msg),
None => return debug!("Pipeline {:?} got reload event after closure.", pipeline_id),
};
if let Err(e) = result {
self.handle_send_error(pipeline_id, e);
}
}
}
fn handle_get_pipeline_title_msg(&mut self, pipeline_id: PipelineId) {
let result = match self.pipelines.get(&pipeline_id) {
None => return self.compositor_proxy.send(ToCompositorMsg::ChangePageTitle(pipeline_id, None)),
Some(pipeline) => pipeline.event_loop.send(ConstellationControlMsg::GetTitle(pipeline_id)),
};
if let Err(e) = result {
self.handle_send_error(pipeline_id, e);
}
}
fn handle_mozbrowser_event_msg(&mut self,
parent_pipeline_id: PipelineId,
pipeline_id: PipelineId,
event: MozBrowserEvent) {
assert!(PREFS.is_mozbrowser_enabled());
// Find the script channel for the given parent pipeline,
// and pass the event to that script thread.
// If the pipeline lookup fails, it is because we have torn down the pipeline,
// so it is reasonable to silently ignore the event.
let frame_id = self.pipelines.get(&pipeline_id).map(|pipeline| pipeline.frame_id);
match self.pipelines.get(&parent_pipeline_id) {
Some(pipeline) => pipeline.trigger_mozbrowser_event(frame_id, event),
None => warn!("Pipeline {:?} handling mozbrowser event after closure.", parent_pipeline_id),
}
}
fn handle_get_pipeline(&mut self, frame_id: Option<FrameId>,
resp_chan: IpcSender<Option<PipelineId>>) {
let frame_id = frame_id.unwrap_or(self.root_frame_id);
let current_pipeline_id = self.frames.get(&frame_id)
.map(|frame| frame.pipeline_id);
let pipeline_id_loaded = self.pending_frames.iter().rev()
.find(|x| x.old_pipeline_id == current_pipeline_id)
.map(|x| x.new_pipeline_id)
.or(current_pipeline_id);
if let Err(e) = resp_chan.send(pipeline_id_loaded) {
warn!("Failed get_pipeline response ({}).", e);
}
}
fn handle_get_frame(&mut self,
pipeline_id: PipelineId,
resp_chan: IpcSender<Option<FrameId>>) {
let frame_id = self.pipelines.get(&pipeline_id).map(|pipeline| pipeline.frame_id);
if let Err(e) = resp_chan.send(frame_id) {
warn!("Failed get_frame response ({}).", e);
}
}
fn focus_parent_pipeline(&mut self, pipeline_id: PipelineId) {
let (frame_id, parent_info) = match self.pipelines.get(&pipeline_id) {
Some(pipeline) => (pipeline.frame_id, pipeline.parent_info),
None => return warn!("Pipeline {:?} focus parent after closure.", pipeline_id),
};
let (parent_pipeline_id, _) = match parent_info {
Some(info) => info,
None => return debug!("Pipeline {:?} focus has no parent.", pipeline_id),
};
// Send a message to the parent of the provided pipeline (if it exists)
// telling it to mark the iframe element as focused.
let msg = ConstellationControlMsg::FocusIFrame(parent_pipeline_id, frame_id);
let result = match self.pipelines.get(&parent_pipeline_id) {
Some(pipeline) => pipeline.event_loop.send(msg),
None => return warn!("Pipeline {:?} focus after closure.", parent_pipeline_id),
};
if let Err(e) = result {
self.handle_send_error(parent_pipeline_id, e);
}
self.focus_parent_pipeline(parent_pipeline_id);
}
fn handle_focus_msg(&mut self, pipeline_id: PipelineId) {
self.focus_pipeline_id = Some(pipeline_id);
// Focus parent iframes recursively
self.focus_parent_pipeline(pipeline_id);
}
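    // Close an iframe's frame, returning every pipeline in its session
    // history (past, current and future) so the caller can clean them up.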
fn handle_remove_iframe_msg(&mut self, frame_id: FrameId) -> Vec<PipelineId> {
let result = self.full_frame_tree_iter(frame_id)
.flat_map(|frame| frame.next.iter().chain(frame.prev.iter())
.filter_map(|entry| entry.pipeline_id)
.chain(once(frame.pipeline_id)))
.collect();
self.close_frame(frame_id, ExitPipelineMode::Normal);
result
}
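    // Propagate a visibility change to every pipeline in the frame's subtree,
    // including pipelines kept alive in each frame's session history.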
fn handle_set_visible_msg(&mut self, pipeline_id: PipelineId, visible: bool) {
let frame_id = match self.pipelines.get(&pipeline_id) {
Some(pipeline) => pipeline.frame_id,
None => return warn!("No frame associated with pipeline {:?}", pipeline_id),
};
let child_pipeline_ids: Vec<PipelineId> = self.full_frame_tree_iter(frame_id)
.flat_map(|frame| frame.prev.iter().chain(frame.next.iter())
.filter_map(|entry| entry.pipeline_id)
.chain(once(frame.pipeline_id)))
.collect();
for id in child_pipeline_ids {
if let Some(pipeline) = self.pipelines.get_mut(&id) {
pipeline.change_visibility(visible);
}
}
}
fn handle_visibility_change_complete(&mut self, pipeline_id: PipelineId, visibility: bool) {
let (frame_id, parent_pipeline_info) = match self.pipelines.get(&pipeline_id) {
None => return warn!("Visibity change for closed pipeline {:?}.", pipeline_id),
Some(pipeline) => (pipeline.frame_id, pipeline.parent_info),
};
if let Some((parent_pipeline_id, _)) = parent_pipeline_info {
let visibility_msg = ConstellationControlMsg::NotifyVisibilityChange(parent_pipeline_id,
frame_id,
visibility);
let result = match self.pipelines.get(&parent_pipeline_id) {
None => return warn!("Parent pipeline {:?} closed", parent_pipeline_id),
Some(parent_pipeline) => parent_pipeline.event_loop.send(visibility_msg),
};
if let Err(e) = result {
self.handle_send_error(parent_pipeline_id, e);
}
}
}
fn handle_create_canvas_paint_thread_msg(
&mut self,
size: &Size2D<i32>,
response_sender: IpcSender<IpcSender<CanvasMsg>>) {
let webrender_api = self.webrender_api_sender.clone();
let sender = CanvasPaintThread::start(*size, webrender_api,
opts::get().enable_canvas_antialiasing);
if let Err(e) = response_sender.send(sender) {
warn!("Create canvas paint thread response failed ({})", e);
}
}
fn handle_create_webgl_paint_thread_msg(
&mut self,
size: &Size2D<i32>,
attributes: GLContextAttributes,
response_sender: IpcSender<Result<(IpcSender<CanvasMsg>, GLLimits), String>>) {
let webrender_api = self.webrender_api_sender.clone();
let response = WebGLPaintThread::start(*size, attributes, webrender_api);
if let Err(e) = response_sender.send(response) {
warn!("Create WebGL paint thread response failed ({})", e);
}
}
fn handle_webdriver_msg(&mut self, msg: WebDriverCommandMsg) {
// Find the script channel for the given parent pipeline,
// and pass the event to that script thread.
match msg {
WebDriverCommandMsg::GetWindowSize(_, reply) => {
let _ = reply.send(self.window_size);
},
WebDriverCommandMsg::SetWindowSize(_, size, reply) => {
self.webdriver.resize_channel = Some(reply);
self.compositor_proxy.send(ToCompositorMsg::ResizeTo(size));
},
WebDriverCommandMsg::LoadUrl(pipeline_id, load_data, reply) => {
self.load_url_for_webdriver(pipeline_id, load_data, reply, false);
},
WebDriverCommandMsg::Refresh(pipeline_id, reply) => {
let load_data = match self.pipelines.get(&pipeline_id) {
Some(pipeline) => LoadData::new(pipeline.url.clone(), None, None),
None => return warn!("Pipeline {:?} Refresh after closure.", pipeline_id),
};
self.load_url_for_webdriver(pipeline_id, load_data, reply, true);
}
WebDriverCommandMsg::ScriptCommand(pipeline_id, cmd) => {
let control_msg = ConstellationControlMsg::WebDriverScriptCommand(pipeline_id, cmd);
let result = match self.pipelines.get(&pipeline_id) {
Some(pipeline) => pipeline.event_loop.send(control_msg),
None => return warn!("Pipeline {:?} ScriptCommand after closure.", pipeline_id),
};
if let Err(e) = result {
self.handle_send_error(pipeline_id, e);
}
},
WebDriverCommandMsg::SendKeys(pipeline_id, cmd) => {
let event_loop = match self.pipelines.get(&pipeline_id) {
Some(pipeline) => pipeline.event_loop.clone(),
None => return warn!("Pipeline {:?} SendKeys after closure.", pipeline_id),
};
for (key, mods, state) in cmd {
let event = CompositorEvent::KeyEvent(None, key, state, mods);
let control_msg = ConstellationControlMsg::SendEvent(pipeline_id, event);
if let Err(e) = event_loop.send(control_msg) {
return self.handle_send_error(pipeline_id, e);
}
}
},
WebDriverCommandMsg::TakeScreenshot(pipeline_id, reply) => {
let current_pipeline_id = self.frames.get(&self.root_frame_id)
.map(|root_frame| root_frame.pipeline_id);
if Some(pipeline_id) == current_pipeline_id {
self.compositor_proxy.send(ToCompositorMsg::CreatePng(reply));
} else {
if let Err(e) = reply.send(None) {
warn!("Screenshot reply failed ({})", e);
}
}
},
}
}
// https://html.spec.whatwg.org/multipage/#traverse-the-history
fn traverse_to_entry(&mut self, entry: FrameState) {
// Step 1.
let frame_id = entry.frame_id;
let pipeline_id = match entry.pipeline_id {
Some(pipeline_id) => pipeline_id,
None => {
// If there is no pipeline, then the document for this
// entry has been discarded, so we navigate to the entry
// URL instead. When the document has activated, it will
// traverse to the entry, but with the new pipeline id.
debug!("Reloading document {} for frame {}.", entry.url, frame_id);
// TODO: referrer?
let load_data = LoadData::new(entry.url.clone(), None, None);
// TODO: save the sandbox state so it can be restored here.
let sandbox = IFrameSandboxState::IFrameUnsandboxed;
let new_pipeline_id = PipelineId::new();
let (old_pipeline_id, parent_info, window_size, is_private) = match self.frames.get(&frame_id) {
Some(frame) => match self.pipelines.get(&frame.pipeline_id) {
Some(pipeline) => (frame.pipeline_id, pipeline.parent_info, pipeline.size, pipeline.is_private),
None => (frame.pipeline_id, None, None, false),
},
None => return warn!("no frame to traverse"),
};
self.new_pipeline(new_pipeline_id, frame_id, parent_info, window_size, load_data, sandbox, is_private);
self.pending_frames.push(FrameChange {
frame_id: frame_id,
old_pipeline_id: Some(old_pipeline_id),
new_pipeline_id: new_pipeline_id,
url: entry.url.clone(),
replace: Some(entry),
});
return;
}
};
// Check if the currently focused pipeline is the pipeline being replaced
// (or a child of it). This has to be done here, before the current
// frame tree is modified below.
let update_focus_pipeline = self.focused_pipeline_in_tree(entry.frame_id);
let old_pipeline_id = match self.frames.get_mut(&frame_id) {
Some(frame) => {
let old_pipeline_id = frame.pipeline_id;
let mut curr_entry = frame.current();
if entry.instant > frame.instant {
// We are traversing to the future.
while let Some(next) = frame.next.pop() {
frame.prev.push(curr_entry);
curr_entry = next;
if entry.instant <= curr_entry.instant { break; }
}
} else if entry.instant < frame.instant {
// We are traversing to the past.
while let Some(prev) = frame.prev.pop() {
frame.next.push(curr_entry);
curr_entry = prev;
if entry.instant >= curr_entry.instant { break; }
}
}
debug_assert_eq!(entry.instant, curr_entry.instant);
frame.update_current(pipeline_id, &entry);
old_pipeline_id
},
None => return warn!("no frame to traverse"),
};
let parent_info = self.pipelines.get(&old_pipeline_id)
.and_then(|pipeline| pipeline.parent_info);
// If the currently focused pipeline is the one being changed (or a child
// of the pipeline being changed) then update the focus pipeline to be
// the replacement.
if update_focus_pipeline {
self.focus_pipeline_id = Some(pipeline_id);
}
// Deactivate the old pipeline, and activate the new one.
self.update_activity(old_pipeline_id);
self.update_activity(pipeline_id);
// Set paint permissions correctly for the compositor layers.
self.send_frame_tree();
// Update the owning iframe to point to the new pipeline id.
// This makes things like contentDocument work correctly.
if let Some((parent_pipeline_id, _)) = parent_info {
let msg = ConstellationControlMsg::UpdatePipelineId(parent_pipeline_id, frame_id, pipeline_id);
let result = match self.pipelines.get(&parent_pipeline_id) {
None => return warn!("Pipeline {:?} child traversed after closure.", parent_pipeline_id),
Some(pipeline) => pipeline.event_loop.send(msg),
};
if let Err(e) = result {
self.handle_send_error(parent_pipeline_id, e);
}
// If this is an iframe, send a mozbrowser location change event.
// This is the result of a back/forward traversal.
self.trigger_mozbrowserlocationchange(pipeline_id);
}
}
fn get_top_level_frame_for_pipeline(&self, mut pipeline_id: PipelineId) -> FrameId {
if PREFS.is_mozbrowser_enabled() {
loop {
match self.pipelines.get(&pipeline_id) {
Some(pipeline) => match pipeline.parent_info {
Some((_, FrameType::MozBrowserIFrame)) => return pipeline.frame_id,
Some((parent_id, _)) => pipeline_id = parent_id,
None => return self.root_frame_id,
},
None => {
warn!("Finding top-level ancestor for pipeline {} after closure.", pipeline_id);
return self.root_frame_id;
},
}
}
} else {
// If mozbrowser is not enabled, the root frame is the only top-level frame
self.root_frame_id
}
}
fn load_url_for_webdriver(&mut self,
pipeline_id: PipelineId,
load_data: LoadData,
reply: IpcSender<webdriver_msg::LoadStatus>,
replace: bool) {
let new_pipeline_id = self.load_url(pipeline_id, load_data, replace);
if let Some(id) = new_pipeline_id {
self.webdriver.load_channel = Some((id, reply));
}
}
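    // Called once a new document is ready to become active: either traverse
    // to the history entry it replaces, push a new entry onto an existing
    // frame, or create a brand new frame for the pipeline.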
fn add_or_replace_pipeline_in_frame_tree(&mut self, frame_change: FrameChange) {
debug!("Setting frame {} to be pipeline {}.", frame_change.frame_id, frame_change.new_pipeline_id);
// If the currently focused pipeline is the one being changed (or a child
// of the pipeline being changed) then update the focus pipeline to be
// the replacement.
if let Some(old_pipeline_id) = frame_change.old_pipeline_id {
if let Some(old_frame_id) = self.pipelines.get(&old_pipeline_id).map(|pipeline| pipeline.frame_id) {
if self.focused_pipeline_in_tree(old_frame_id) {
self.focus_pipeline_id = Some(frame_change.new_pipeline_id);
}
}
}
let (evicted_id, new_frame, navigated, location_changed) = if let Some(mut entry) = frame_change.replace {
debug!("Replacing pipeline in existing frame.");
let evicted_id = entry.pipeline_id;
entry.replace_pipeline(frame_change.new_pipeline_id, frame_change.url.clone());
self.traverse_to_entry(entry);
(evicted_id, false, None, false)
} else if let Some(frame) = self.frames.get_mut(&frame_change.frame_id) {
debug!("Adding pipeline to existing frame.");
let old_pipeline_id = frame.pipeline_id;
frame.load(frame_change.new_pipeline_id, frame_change.url.clone());
let evicted_id = frame.prev.len()
.checked_sub(PREFS.get("session-history.max-length").as_u64().unwrap_or(20) as usize)
.and_then(|index| frame.prev.get_mut(index))
.and_then(|entry| entry.pipeline_id.take());
(evicted_id, false, Some(old_pipeline_id), true)
} else {
debug!("Adding pipeline to new frame.");
(None, true, None, true)
};
if let Some(evicted_id) = evicted_id {
self.close_pipeline(evicted_id, DiscardBrowsingContext::No, ExitPipelineMode::Normal);
}
if new_frame {
self.new_frame(frame_change.frame_id, frame_change.new_pipeline_id, frame_change.url);
self.update_activity(frame_change.new_pipeline_id);
};
if let Some(old_pipeline_id) = navigated {
// Deactivate the old pipeline, and activate the new one.
self.update_activity(old_pipeline_id);
self.update_activity(frame_change.new_pipeline_id);
// Clear the joint session future
let top_level_frame_id = self.get_top_level_frame_for_pipeline(frame_change.new_pipeline_id);
self.clear_joint_session_future(top_level_frame_id);
}
if location_changed {
self.trigger_mozbrowserlocationchange(frame_change.new_pipeline_id);
}
// Build frame tree
self.send_frame_tree();
}
fn handle_activate_document_msg(&mut self, pipeline_id: PipelineId) {
debug!("Document ready to activate {}", pipeline_id);
// Notify the parent (if there is one).
if let Some(pipeline) = self.pipelines.get(&pipeline_id) {
if let Some((parent_pipeline_id, _)) = pipeline.parent_info {
if let Some(parent_pipeline) = self.pipelines.get(&parent_pipeline_id) {
let msg = ConstellationControlMsg::FramedContentChanged(parent_pipeline_id, pipeline.frame_id);
let _ = parent_pipeline.event_loop.send(msg);
}
}
}
// Find the pending frame change whose new pipeline id is pipeline_id.
let pending_index = self.pending_frames.iter().rposition(|frame_change| {
frame_change.new_pipeline_id == pipeline_id
});
// If it is found, remove it from the pending frames, and make it
// the active document of its frame.
if let Some(pending_index) = pending_index {
let frame_change = self.pending_frames.swap_remove(pending_index);
self.add_or_replace_pipeline_in_frame_tree(frame_change);
}
}
/// Called when the window is resized.
fn handle_window_size_msg(&mut self, new_size: WindowSizeData, size_type: WindowSizeType) {
debug!("handle_window_size_msg: {:?}", new_size.initial_viewport.to_untyped());
if let Some(frame) = self.frames.get(&self.root_frame_id) {
// Send Resize (or ResizeInactive) messages to each
// pipeline in the frame tree.
let pipeline_id = frame.pipeline_id;
let pipeline = match self.pipelines.get(&pipeline_id) {
None => return warn!("Pipeline {:?} resized after closing.", pipeline_id),
Some(pipeline) => pipeline,
};
let _ = pipeline.event_loop.send(ConstellationControlMsg::Resize(
pipeline.id,
new_size,
size_type
));
let pipelines = frame.prev.iter().chain(frame.next.iter())
.filter_map(|entry| entry.pipeline_id)
.filter_map(|pipeline_id| self.pipelines.get(&pipeline_id));
for pipeline in pipelines {
let _ = pipeline.event_loop.send(ConstellationControlMsg::ResizeInactive(
pipeline.id,
new_size
));
}
}
// Send resize message to any pending pipelines that aren't loaded yet.
for pending_frame in &self.pending_frames {
let pipeline_id = pending_frame.new_pipeline_id;
let pipeline = match self.pipelines.get(&pipeline_id) {
None => { warn!("Pending pipeline {:?} is closed", pipeline_id); continue; }
Some(pipeline) => pipeline,
};
if pipeline.parent_info.is_none() {
let _ = pipeline.event_loop.send(ConstellationControlMsg::Resize(
pipeline.id,
new_size,
size_type
));
}
}
if let Some(resize_channel) = self.webdriver.resize_channel.take() {
let _ = resize_channel.send(new_size);
}
self.window_size = new_size;
}
/// Handle updating actual viewport / zoom due to @viewport rules
fn handle_viewport_constrained_msg(&mut self,
pipeline_id: PipelineId,
constraints: ViewportConstraints) {
self.compositor_proxy.send(ToCompositorMsg::ViewportConstrained(pipeline_id, constraints));
}
/// Checks the state of all script and layout pipelines to see if they are idle
/// and compares the current layout state to what the compositor has. This is used
/// to check if the output image is "stable" and can be written as a screenshot
/// for reftests.
/// Since this function is only used in reftests, we do not harden it against panic.
fn handle_is_ready_to_save_image(&mut self,
pipeline_states: HashMap<PipelineId, Epoch>) -> ReadyToSave {
// Note that this function can panic, due to ipc-channel creation failure.
        // Avoiding this panic would require a mechanism for dealing
// with low-resource scenarios.
//
// If there is no root frame yet, the initial page has
// not loaded, so there is nothing to save yet.
if !self.frames.contains_key(&self.root_frame_id) {
return ReadyToSave::NoRootFrame;
}
// If there are pending loads, wait for those to complete.
if !self.pending_frames.is_empty() {
return ReadyToSave::PendingFrames;
}
let (state_sender, state_receiver) = ipc::channel().expect("Failed to create IPC channel!");
let (epoch_sender, epoch_receiver) = ipc::channel().expect("Failed to create IPC channel!");
// Step through the current frame tree, checking that the script
// thread is idle, and that the current epoch of the layout thread
// matches what the compositor has painted. If all these conditions
// are met, then the output image should not change and a reftest
// screenshot can safely be written.
for frame in self.current_frame_tree_iter(self.root_frame_id) {
let pipeline_id = frame.pipeline_id;
debug!("Checking readiness of frame {}, pipeline {}.", frame.id, pipeline_id);
let pipeline = match self.pipelines.get(&pipeline_id) {
None => {
warn!("Pipeline {:?} screenshot while closing.", pipeline_id);
continue;
},
Some(pipeline) => pipeline,
};
// Check to see if there are any webfonts still loading.
//
// If GetWebFontLoadState returns false, either there are no
// webfonts loading, or there's a WebFontLoaded message waiting in
// script_chan's message queue. Therefore, we need to check this
// before we check whether the document is ready; otherwise,
// there's a race condition where a webfont has finished loading,
// but hasn't yet notified the document.
let msg = LayoutControlMsg::GetWebFontLoadState(state_sender.clone());
if let Err(e) = pipeline.layout_chan.send(msg) {
warn!("Get web font failed ({})", e);
}
if state_receiver.recv().unwrap_or(true) {
return ReadyToSave::WebFontNotLoaded;
}
// See if this pipeline has reached idle script state yet.
match self.document_states.get(&frame.pipeline_id) {
Some(&DocumentState::Idle) => {}
Some(&DocumentState::Pending) | None => {
return ReadyToSave::DocumentLoading;
}
}
// Check the visible rectangle for this pipeline. If the constellation has received a
// size for the pipeline, then its painting should be up to date. If the constellation
// *hasn't* received a size, it could be that the layer was hidden by script before the
// compositor discovered it, so we just don't check the layer.
if let Some(size) = pipeline.size {
// If the rectangle for this pipeline is zero sized, it will
// never be painted. In this case, don't query the layout
// thread as it won't contribute to the final output image.
if size == TypedSize2D::zero() {
continue;
}
// Get the epoch that the compositor has drawn for this pipeline.
let compositor_epoch = pipeline_states.get(&frame.pipeline_id);
match compositor_epoch {
Some(compositor_epoch) => {
// Synchronously query the layout thread to see if the current
// epoch matches what the compositor has drawn. If they match
// (and script is idle) then this pipeline won't change again
// and can be considered stable.
let message = LayoutControlMsg::GetCurrentEpoch(epoch_sender.clone());
if let Err(e) = pipeline.layout_chan.send(message) {
warn!("Failed to send GetCurrentEpoch ({}).", e);
}
match epoch_receiver.recv() {
Err(e) => warn!("Failed to receive current epoch ({}).", e),
Ok(layout_thread_epoch) => if layout_thread_epoch != *compositor_epoch {
return ReadyToSave::EpochMismatch;
},
}
}
None => {
// The compositor doesn't know about this pipeline yet.
// Assume it hasn't rendered yet.
return ReadyToSave::PipelineUnknown;
}
}
}
}
// All script threads are idle and layout epochs match compositor, so output image!
ReadyToSave::Ready
}
/// Get the current activity of a pipeline.
fn get_activity(&self, pipeline_id: PipelineId) -> DocumentActivity {
let mut ancestor_id = pipeline_id;
loop {
if let Some(ancestor) = self.pipelines.get(&ancestor_id) {
if let Some(frame) = self.frames.get(&ancestor.frame_id) {
if frame.pipeline_id == ancestor_id {
if let Some((parent_id, FrameType::IFrame)) = ancestor.parent_info {
ancestor_id = parent_id;
continue;
} else {
return DocumentActivity::FullyActive;
}
}
}
}
if pipeline_id == ancestor_id {
return DocumentActivity::Inactive;
} else {
return DocumentActivity::Active;
}
}
}
/// Set the current activity of a pipeline.
fn set_activity(&self, pipeline_id: PipelineId, activity: DocumentActivity) {
debug!("Setting activity of {} to be {:?}.", pipeline_id, activity);
if let Some(pipeline) = self.pipelines.get(&pipeline_id) {
pipeline.set_activity(activity);
let child_activity = if activity == DocumentActivity::Inactive {
DocumentActivity::Active
} else {
activity
};
for child_id in &pipeline.children {
if let Some(child) = self.frames.get(child_id) {
self.set_activity(child.pipeline_id, child_activity);
}
}
}
}
/// Update the current activity of a pipeline.
fn update_activity(&self, pipeline_id: PipelineId) {
self.set_activity(pipeline_id, self.get_activity(pipeline_id));
}
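    // Discard the forward session history of the frame and all its
    // descendants, closing the pipelines of any evicted entries.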
fn clear_joint_session_future(&mut self, frame_id: FrameId) {
let frame_ids: Vec<FrameId> = self.full_frame_tree_iter(frame_id)
.map(|frame| frame.id)
.collect();
for frame_id in frame_ids {
let evicted = match self.frames.get_mut(&frame_id) {
Some(frame) => frame.remove_forward_entries(),
None => continue,
};
for entry in evicted {
if let Some(pipeline_id) = entry.pipeline_id {
self.close_pipeline(pipeline_id, DiscardBrowsingContext::No, ExitPipelineMode::Normal);
}
}
}
}
// Close a frame (and all children)
fn close_frame(&mut self, frame_id: FrameId, exit_mode: ExitPipelineMode) {
debug!("Closing frame {}.", frame_id);
let parent_info = self.frames.get(&frame_id)
.and_then(|frame| self.pipelines.get(&frame.pipeline_id))
.and_then(|pipeline| pipeline.parent_info);
self.close_frame_children(frame_id, DiscardBrowsingContext::Yes, exit_mode);
self.event_loops.remove(&frame_id);
if self.frames.remove(&frame_id).is_none() {
warn!("Closing frame {:?} twice.", frame_id);
}
if let Some((parent_pipeline_id, _)) = parent_info {
let parent_pipeline = match self.pipelines.get_mut(&parent_pipeline_id) {
None => return warn!("Pipeline {:?} child closed after parent.", parent_pipeline_id),
Some(parent_pipeline) => parent_pipeline,
};
parent_pipeline.remove_child(frame_id);
}
debug!("Closed frame {:?}.", frame_id);
}
// Close the children of a frame
fn close_frame_children(&mut self, frame_id: FrameId, dbc: DiscardBrowsingContext, exit_mode: ExitPipelineMode) {
debug!("Closing frame children {}.", frame_id);
// Store information about the pipelines to be closed. Then close the
// pipelines, before removing ourself from the frames hash map. This
// ordering is vital - so that if close_pipeline() ends up closing
// any child frames, they can be removed from the parent frame correctly.
let mut pipelines_to_close: Vec<PipelineId> = self.pending_frames.iter()
.filter(|frame_change| frame_change.frame_id == frame_id)
.map(|frame_change| frame_change.new_pipeline_id)
.collect();
if let Some(frame) = self.frames.get(&frame_id) {
pipelines_to_close.extend(frame.next.iter().filter_map(|state| state.pipeline_id));
pipelines_to_close.push(frame.pipeline_id);
pipelines_to_close.extend(frame.prev.iter().filter_map(|state| state.pipeline_id));
}
for pipeline_id in pipelines_to_close {
self.close_pipeline(pipeline_id, dbc, exit_mode);
}
debug!("Closed frame children {}.", frame_id);
}
// Close all pipelines at and beneath a given frame
fn close_pipeline(&mut self, pipeline_id: PipelineId, dbc: DiscardBrowsingContext, exit_mode: ExitPipelineMode) {
debug!("Closing pipeline {:?}.", pipeline_id);
// Store information about the frames to be closed. Then close the
// frames, before removing ourself from the pipelines hash map. This
// ordering is vital - so that if close_frame() ends up closing
// any child pipelines, they can be removed from the parent pipeline correctly.
let frames_to_close = {
let mut frames_to_close = vec!();
if let Some(pipeline) = self.pipelines.get(&pipeline_id) {
frames_to_close.extend_from_slice(&pipeline.children);
}
frames_to_close
};
// Remove any child frames
for child_frame in &frames_to_close {
self.close_frame(*child_frame, exit_mode);
}
// Note, we don't remove the pipeline now, we wait for the message to come back from
// the pipeline.
let pipeline = match self.pipelines.get(&pipeline_id) {
Some(pipeline) => pipeline,
None => return warn!("Closing pipeline {:?} twice.", pipeline_id),
};
// Remove this pipeline from pending frames if it hasn't loaded yet.
let pending_index = self.pending_frames.iter().position(|frame_change| {
frame_change.new_pipeline_id == pipeline_id
});
if let Some(pending_index) = pending_index {
self.pending_frames.remove(pending_index);
}
// Inform script, compositor that this pipeline has exited.
match exit_mode {
ExitPipelineMode::Normal => pipeline.exit(dbc),
ExitPipelineMode::Force => pipeline.force_exit(dbc),
}
debug!("Closed pipeline {:?}.", pipeline_id);
}
    // Randomly close a pipeline if --random-pipeline-closure-probability is set
fn maybe_close_random_pipeline(&mut self) {
match self.random_pipeline_closure {
Some((ref mut rng, probability)) => if probability <= rng.gen::<f32>() { return },
_ => return,
};
// In order to get repeatability, we sort the pipeline ids.
let mut pipeline_ids: Vec<&PipelineId> = self.pipelines.keys().collect();
pipeline_ids.sort();
if let Some((ref mut rng, _)) = self.random_pipeline_closure {
if let Some(pipeline_id) = rng.choose(&*pipeline_ids) {
if let Some(pipeline) = self.pipelines.get(pipeline_id) {
// Don't kill the mozbrowser pipeline
if PREFS.is_mozbrowser_enabled() && pipeline.parent_info.is_none() {
info!("Not closing mozbrowser pipeline {}.", pipeline_id);
} else {
// Note that we deliberately do not do any of the tidying up
// associated with closing a pipeline. The constellation should cope!
warn!("Randomly closing pipeline {}.", pipeline_id);
pipeline.force_exit(DiscardBrowsingContext::No);
}
}
}
}
}
// Convert a frame to a sendable form to pass to the compositor
fn frame_to_sendable(&self, frame_id: FrameId) -> Option<SendableFrameTree> {
self.frames.get(&frame_id).and_then(|frame: &Frame| {
self.pipelines.get(&frame.pipeline_id).map(|pipeline: &Pipeline| {
let mut frame_tree = SendableFrameTree {
pipeline: pipeline.to_sendable(),
size: pipeline.size,
children: vec!(),
};
for child_frame_id in &pipeline.children {
if let Some(frame) = self.frame_to_sendable(*child_frame_id) {
frame_tree.children.push(frame);
}
}
frame_tree
})
})
}
// Send the current frame tree to compositor
fn send_frame_tree(&mut self) {
// Note that this function can panic, due to ipc-channel creation failure.
        // Avoiding this panic would require a mechanism for dealing
// with low-resource scenarios.
debug!("Sending frame tree for frame {}.", self.root_frame_id);
if let Some(frame_tree) = self.frame_to_sendable(self.root_frame_id) {
let (chan, port) = ipc::channel().expect("Failed to create IPC channel!");
self.compositor_proxy.send(ToCompositorMsg::SetFrameTree(frame_tree,
chan));
if port.recv().is_err() {
warn!("Compositor has discarded SetFrameTree");
return; // Our message has been discarded, probably shutting down.
}
}
}
// https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowserlocationchange
// Note that this is a no-op if the pipeline is not a mozbrowser iframe
fn trigger_mozbrowserlocationchange(&self, pipeline_id: PipelineId) {
match self.pipelines.get(&pipeline_id) {
Some(pipeline) => if let Some((parent_id, FrameType::MozBrowserIFrame)) = pipeline.parent_info {
match self.pipelines.get(&parent_id) {
Some(parent) => {
let can_go_forward = !self.joint_session_future_is_empty(pipeline.frame_id);
let can_go_back = !self.joint_session_past_is_empty(pipeline.frame_id);
let url = pipeline.url.to_string();
let event = MozBrowserEvent::LocationChange(url, can_go_back, can_go_forward);
parent.trigger_mozbrowser_event(Some(pipeline.frame_id), event);
},
None => warn!("triggered mozbrowser location change on closed parent {}", parent_id),
}
},
None => warn!("triggered mozbrowser location change on closed pipeline {}", pipeline_id),
}
}
// https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowsererror
// Note that this does not require the pipeline to be an immediate child of the root
fn trigger_mozbrowsererror(&mut self, top_level_frame_id: FrameId, reason: String, backtrace: Option<String>) {
if !PREFS.is_mozbrowser_enabled() { return; }
let mut report = String::new();
for (thread_name, warning) in self.handled_warnings.drain(..) {
report.push_str("\nWARNING: ");
if let Some(thread_name) = thread_name {
report.push_str("<");
report.push_str(&*thread_name);
report.push_str(">: ");
}
report.push_str(&*warning);
}
report.push_str("\nERROR: ");
report.push_str(&*reason);
if let Some(backtrace) = backtrace {
report.push_str("\n\n");
report.push_str(&*backtrace);
}
let event = MozBrowserEvent::Error(MozBrowserErrorType::Fatal, reason, report);
match self.frames.get(&top_level_frame_id) {
None => warn!("Mozbrowser error after top-level frame closed."),
Some(frame) => match self.pipelines.get(&frame.pipeline_id) {
None => warn!("Mozbrowser error after top-level pipeline closed."),
Some(pipeline) => match pipeline.parent_info {
None => pipeline.trigger_mozbrowser_event(None, event),
Some((parent_id, _)) => match self.pipelines.get(&parent_id) {
None => warn!("Mozbrowser error after root pipeline closed."),
Some(parent) => parent.trigger_mozbrowser_event(Some(top_level_frame_id), event),
},
},
},
};
}
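    // Whether the currently focused pipeline lies within the current frame
    // tree rooted at the given frame.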
fn focused_pipeline_in_tree(&self, frame_id: FrameId) -> bool {
self.focus_pipeline_id.map_or(false, |pipeline_id| {
self.pipeline_exists_in_tree(pipeline_id, frame_id)
})
}
fn pipeline_is_in_current_frame(&self, pipeline_id: PipelineId) -> bool {
self.pipeline_exists_in_tree(pipeline_id, self.root_frame_id)
}
fn pipeline_exists_in_tree(&self,
pipeline_id: PipelineId,
root_frame_id: FrameId) -> bool {
self.current_frame_tree_iter(root_frame_id)
.any(|current_frame| current_frame.pipeline_id == pipeline_id)
}
}<|fim▁end|> | |
<|file_name|>test_unix_echo_server.rs<|end_file_name|><|fim▁begin|>use mio::*;
use mio::unix::*;
use bytes::{Buf, ByteBuf, MutByteBuf, SliceBuf};
use mio::util::Slab;
use std::path::PathBuf;
use std::io;
use tempdir::TempDir;
const SERVER: Token = Token(0);
const CLIENT: Token = Token(1);
struct EchoConn {
sock: UnixStream,
buf: Option<ByteBuf>,
mut_buf: Option<MutByteBuf>,
token: Option<Token>,
interest: EventSet,
}
impl EchoConn {
fn new(sock: UnixStream) -> EchoConn {
EchoConn {
sock: sock,
buf: None,
mut_buf: Some(ByteBuf::mut_with_capacity(2048)),
token: None,
interest: EventSet::hup(),
}
}
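    // Flush the pending echo buffer back to the client. On WOULDBLOCK the
    // buffer is kept and writable interest retained; on success interest
    // flips back to readable, ready for the next message.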
fn writable(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
let mut buf = self.buf.take().unwrap();
match self.sock.try_write_buf(&mut buf) {
Ok(None) => {
debug!("client flushing buf; WOULDBLOCK");
self.buf = Some(buf);
self.interest.insert(EventSet::writable());
}
Ok(Some(r)) => {
debug!("CONN : we wrote {} bytes!", r);
self.mut_buf = Some(buf.flip());
self.interest.insert(EventSet::readable());
self.interest.remove(EventSet::writable());
}
Err(e) => debug!("not implemented; client err={:?}", e),
}
event_loop.reregister(&self.sock, self.token.unwrap(), self.interest, PollOpt::edge() | PollOpt::oneshot())
}
fn readable(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
let mut buf = self.mut_buf.take().unwrap();
match self.sock.try_read_buf(&mut buf) {
Ok(None) => {
debug!("CONN : spurious read wakeup");
self.mut_buf = Some(buf);
}
Ok(Some(r)) => {
debug!("CONN : we read {} bytes!", r);
// prepare to provide this to writable
self.buf = Some(buf.flip());
self.interest.remove(EventSet::readable());
self.interest.insert(EventSet::writable());
}
Err(e) => {
debug!("not implemented; client err={:?}", e);
self.interest.remove(EventSet::readable());
}
};
event_loop.reregister(&self.sock, self.token.unwrap(), self.interest, PollOpt::edge() | PollOpt::oneshot())
}
}
struct EchoServer {
sock: UnixListener,
conns: Slab<EchoConn>
}
impl EchoServer {
fn accept(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
debug!("server accepting socket");
let sock = self.sock.accept().unwrap().unwrap();
let conn = EchoConn::new(sock);
let tok = self.conns.insert(conn)
.ok().expect("could not add connectiont o slab");
// Register the connection
self.conns[tok].token = Some(tok);
event_loop.register(&self.conns[tok].sock, tok, EventSet::readable(), PollOpt::edge() | PollOpt::oneshot())
.ok().expect("could not register socket with event loop");
Ok(())
}
fn conn_readable(&mut self, event_loop: &mut EventLoop<Echo>, tok: Token) -> io::Result<()> {
debug!("server conn readable; tok={:?}", tok);
self.conn(tok).readable(event_loop)
}
fn conn_writable(&mut self, event_loop: &mut EventLoop<Echo>, tok: Token) -> io::Result<()> {
debug!("server conn writable; tok={:?}", tok);
self.conn(tok).writable(event_loop)
}
fn conn<'a>(&'a mut self, tok: Token) -> &'a mut EchoConn {
&mut self.conns[tok]
}
}
struct EchoClient {
sock: UnixStream,
msgs: Vec<&'static str>,
tx: SliceBuf<'static>,
rx: SliceBuf<'static>,
mut_buf: Option<MutByteBuf>,
token: Token,
interest: EventSet,
}
// Sends a message and expects to receive the same exact message, one at a time
impl EchoClient {
fn new(sock: UnixStream, tok: Token, mut msgs: Vec<&'static str>) -> EchoClient {
let curr = msgs.remove(0);
EchoClient {
sock: sock,
msgs: msgs,
tx: SliceBuf::wrap(curr.as_bytes()),
rx: SliceBuf::wrap(curr.as_bytes()),
mut_buf: Some(ByteBuf::mut_with_capacity(2048)),
token: tok,
interest: EventSet::none(),
}
}
fn readable(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
debug!("client socket readable");
let mut buf = self.mut_buf.take().unwrap();
match self.sock.try_read_buf(&mut buf) {
Ok(None) => {
debug!("CLIENT : spurious read wakeup");
self.mut_buf = Some(buf);
}
Ok(Some(r)) => {
debug!("CLIENT : We read {} bytes!", r);
// prepare for reading
let mut buf = buf.flip();
debug!("CLIENT : buf = {:?} -- rx = {:?}", buf.bytes(), self.rx.bytes());
while buf.has_remaining() {
let actual = buf.read_byte().unwrap();
let expect = self.rx.read_byte().unwrap();
assert!(actual == expect, "actual={}; expect={}", actual, expect);
}
self.mut_buf = Some(buf.flip());
self.interest.remove(EventSet::readable());
if !self.rx.has_remaining() {
self.next_msg(event_loop).unwrap();
}
}
Err(e) => {
panic!("not implemented; client err={:?}", e);
}
};
event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())
}
fn writable(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
debug!("client socket writable");
match self.sock.try_write_buf(&mut self.tx) {
Ok(None) => {
debug!("client flushing buf; WOULDBLOCK");
self.interest.insert(EventSet::writable());
}
Ok(Some(r)) => {
debug!("CLIENT : we wrote {} bytes!", r);
self.interest.insert(EventSet::readable());
self.interest.remove(EventSet::writable());
}
Err(e) => debug!("not implemented; client err={:?}", e)
}
event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())
}
fn next_msg(&mut self, event_loop: &mut EventLoop<Echo>) -> io::Result<()> {
if self.msgs.is_empty() {
event_loop.shutdown();
return Ok(());
}
let curr = self.msgs.remove(0);
debug!("client prepping next message");
self.tx = SliceBuf::wrap(curr.as_bytes());
self.rx = SliceBuf::wrap(curr.as_bytes());
self.interest.insert(EventSet::writable());
event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())
}
}
struct Echo {
server: EchoServer,
client: EchoClient,
}
impl Echo {
fn new(srv: UnixListener, client: UnixStream, msgs: Vec<&'static str>) -> Echo {
Echo {
server: EchoServer {
sock: srv,
conns: Slab::new_starting_at(Token(2), 128)
},
client: EchoClient::new(client, CLIENT, msgs)
}
}
}
impl Handler for Echo {
type Timeout = usize;
type Message = ();
fn ready(&mut self, event_loop: &mut EventLoop<Echo>, token: Token, events: EventSet) {
if events.is_readable() {
match token {
SERVER => self.server.accept(event_loop).unwrap(),
CLIENT => self.client.readable(event_loop).unwrap(),
i => self.server.conn_readable(event_loop, i).unwrap()
};
}<|fim▁hole|> SERVER => panic!("received writable for token 0"),
CLIENT => self.client.writable(event_loop).unwrap(),
_ => self.server.conn_writable(event_loop, token).unwrap()
};
}
}
}
#[test]
pub fn test_unix_echo_server() {
debug!("Starting TEST_UNIX_ECHO_SERVER");
let mut event_loop = EventLoop::new().unwrap();
let tmp_dir = TempDir::new("test_unix_echo_server").unwrap();
let addr = tmp_dir.path().join(&PathBuf::from("sock"));
let srv = UnixListener::bind(&addr).unwrap();
info!("listen for connections");
event_loop.register(&srv, SERVER, EventSet::readable(), PollOpt::edge() | PollOpt::oneshot()).unwrap();
let sock = UnixStream::connect(&addr).unwrap();
    // Register the client connection with the event loop
event_loop.register(&sock, CLIENT, EventSet::writable(), PollOpt::edge() | PollOpt::oneshot()).unwrap();
// Start the event loop
event_loop.run(&mut Echo::new(srv, sock, vec!["foo", "bar"])).unwrap();
}<|fim▁end|> |
if events.is_writable() {
match token { |
<|file_name|>pipe.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# pipe.py
#
# Copyright 2014 Giorgio Gilestro <gg@kozak>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
# Listen from pipefile
# e.g.: echo "TEST COMMAND" > /tmp/pipefile
import os, tempfile
import logging
import threading
class pipe():
def __init__(self, pipefile, queue, actions):
"""
        Reads commands from a named pipe on a background listener thread.
"""
self.pipefile = pipefile
self.queue = queue
actions["pipe"] = {}
self.__makefifo()
self.listening_thread = threading.Thread(target=self.listen_from_pipe)
#self.listening_thread.daemon = True
self.isListening = True
self.listening_thread.start()
def transmit(self, received):
"""
"""
cmd = ("pipe", received)
self.queue.put(cmd)
def __makefifo(self):
"""
"""
try:
os.mkfifo(self.pipefile)
logging.debug("Listening to FIFO Pipe at %s" % self.pipefile)<|fim▁hole|> logging.debug("Error creating FIFO Pipe %s. File already existing?" % self.pipefile)
return False
def listen_from_pipe(self):
"""
"""
while self.isListening:
logging.debug("Listening from PIPE %s" % self.pipefile)
with open(self.pipefile) as fifo:
self.transmit(fifo.read().strip())
if __name__ == '__main__':
p = pipe("pipefile", "none")<|fim▁end|> | return True
except: |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>export default {
name: 'help-view',
data() {
return {<|fim▁hole|> }
},
methods: {
}
}<|fim▁end|> | |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for sitefinder_project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sitefinder_project.settings.production")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Wrap werkzeug debugger if DEBUG is on
from django.conf import settings<|fim▁hole|> try:
import django.views.debug
import six
from werkzeug.debug import DebuggedApplication
def null_technical_500_response(request, exc_type, exc_value, tb):
six.reraise(exc_type, exc_value, tb)
django.views.debug.technical_500_response = null_technical_500_response
application = DebuggedApplication(application, evalex=True)
except ImportError:
pass<|fim▁end|> | if settings.DEBUG: |
<|file_name|>exportevents.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2012-2013 Turbulenz Limited
from logging import basicConfig, CRITICAL, INFO, WARNING
import argparse
from urllib3 import connection_from_url
from urllib3.exceptions import HTTPError, SSLError
from simplejson import loads as json_loads, dump as json_dump
from gzip import GzipFile
from zlib import decompress as zlib_decompress
from time import strptime, strftime, gmtime
from calendar import timegm
from re import compile as re_compile
from sys import stdin, argv
from os import mkdir
from os.path import exists as path_exists, join as path_join, normpath
from getpass import getpass, GetPassWarning
from base64 import urlsafe_b64decode
__version__ = '2.1.2'
__dependencies__ = []
HUB_COOKIE_NAME = 'hub'
HUB_URL = 'https://hub.turbulenz.com/'
DATATYPE_DEFAULT = 'events'
DATATYPE_URL = { 'events': '/dynamic/project/%s/event-log',
'users': '/dynamic/project/%s/user-info' }
DAY = 86400
TODAY_START = (timegm(gmtime()) / DAY) * DAY
# pylint: disable=C0301
USERNAME_PATTERN = re_compile('^[a-z0-9]+[a-z0-9-]*$') # usernames
PROJECT_SLUG_PATTERN = re_compile('^[a-zA-Z0-9\-]*$') # game
# pylint: enable=C0301
class DateRange(object):
"""Maintain a time range between two dates. If only a start time is given it will generate a 24 hour period
starting at that time. Defaults to the start of the current day if no times are given"""
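    # Example (hypothetical dates): DateRange.parse('2013-01-01:2013-01-07')
    # spans 2013-01-01 00:00Z up to, but not including, 2013-01-08 00:00Z,
    # since the end date is extended by a full day to keep the range inclusive.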
def __init__(self, start=TODAY_START, end=None):
self.start = start
if end:
self.end = end
else:
self.end = start + DAY
if self.start > self.end:
raise ValueError('Start date can\'t be greater than the end date')
def _range_str(t):
if t % DAY:
return strftime('%Y-%m-%d %H:%M:%SZ', gmtime(t))
else:
return strftime('%Y-%m-%d', gmtime(t))
self.start_str = _range_str(self.start)
if self.end % DAY:
self.end_str = _range_str(self.end)
else:
self.end_str = _range_str(self.end - DAY)
def filename_str(self):
if self.start_str == self.end_str:
return self.start_str
elif int(self.start / DAY) == int(self.end / DAY):
result = '%s_-_%s' % (strftime('%Y-%m-%d %H:%M:%SZ', gmtime(self.start)),
strftime('%Y-%m-%d %H:%M:%SZ', gmtime(self.end)))
return result.replace(' ', '_').replace(':', '-')
else:
result = '%s_-_%s' % (self.start_str, self.end_str)
return result.replace(' ', '_').replace(':', '-')
@staticmethod
def parse(range_str):
date_format = '%Y-%m-%d'
range_parts = range_str.split(':')
if len(range_parts) < 1:
error('Date not set')
exit(1)
elif len(range_parts) > 2:
error('Can\'t provide more than two dates for date range')
exit(1)
try:
start = int(timegm(strptime(range_parts[0], date_format)))
end = None
if len(range_parts) == 2:
end = int(timegm(strptime(range_parts[1], date_format))) + DAY
except ValueError:
error('Dates must be in the yyyy-mm-dd format')
exit(1)
return DateRange(start, end)
def log(message, new_line=True):
print '\r >> %s' % message,
if new_line:
print
def error(message):
log('[ERROR] - %s' % message)
def warning(message):
log('[WARNING] - %s' % message)
def _parse_args():
parser = argparse.ArgumentParser(description="Export event logs and anonymised user information of a game.")
parser.add_argument("-v", "--verbose", action="store_true", help="verbose output")
parser.add_argument("-s", "--silent", action="store_true", help="silent running")
parser.add_argument("--version", action='version', version=__version__)
parser.add_argument("-u", "--user", action="store",
help="Hub login username (will be requested if not provided)")
parser.add_argument("-p", "--password", action="store",
help="Hub login password (will be requested if not provided)")
parser.add_argument("-t", "--type", action="store", default=DATATYPE_DEFAULT,
help="type of data to download, either events or users (defaults to " + DATATYPE_DEFAULT + ")")
parser.add_argument("-d", "--daterange", action="store", default=TODAY_START,
help="individual 'yyyy-mm-dd' or range 'yyyy-mm-dd : yyyy-mm-dd' of dates to get the data " \
"for (defaults to today)")
parser.add_argument("-o", "--outputdir", action="store", default="",
help="folder to output the downloaded files to (defaults to current directory)")
<|fim▁hole|> parser.add_argument("--hub", default=HUB_URL, help="Hub url (defaults to https://hub.turbulenz.com/)")
parser.add_argument("project", metavar='project_slug', help="Slug of Hub project you wish to download from")
args = parser.parse_args(argv[1:])
if args.silent:
basicConfig(level=CRITICAL)
elif args.verbose:
basicConfig(level=INFO)
else:
basicConfig(level=WARNING)
if not PROJECT_SLUG_PATTERN.match(args.project):
error('Incorrect "project" format')
exit(-1)
username = args.user
if not username:
print 'Username: ',
username = stdin.readline()
if not username:
error('Login information required')
exit(-1)
username = username.strip()
args.user = username
if not USERNAME_PATTERN.match(username):
error('Incorrect "username" format')
exit(-1)
if not args.password:
try:
args.password = getpass()
except GetPassWarning:
error('Echo free password entry unsupported. Please provide a --password argument')
return -1
if args.type not in ['events', 'users']:
error('Type must be one of \'events\' or \'users\'')
exit(1)
if isinstance(args.daterange, int):
args.daterange = DateRange(args.daterange)
else:
args.daterange = DateRange.parse(args.daterange)
return args
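# Authenticate against the Hub and return the session cookie that subsequent
# requests must carry; exits the process on any login failure.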
def login(connection, options):
username = options.user
password = options.password
if not options.silent:
log('Login as "%s".' % username)
credentials = {'login': username,
'password': password,
'source': '/tool'}
try:
r = connection.request('POST',
'/dynamic/login',
fields=credentials,
retries=1,
redirect=False)
except (HTTPError, SSLError):
error('Connection to Hub failed!')
exit(-1)
if r.status != 200:
if r.status == 301:
redirect_location = r.headers.get('location', '')
end_domain = redirect_location.find('/dynamic/login')
error('Login is being redirected to "%s". Please verify the Hub URL.' % redirect_location[:end_domain])
else:
error('Wrong user login information!')
exit(-1)
cookie = r.headers.get('set-cookie', None)
login_info = json_loads(r.data)
# pylint: disable=E1103
if not cookie or HUB_COOKIE_NAME not in cookie or login_info.get('source') != credentials['source']:
error('Hub login failed!')
exit(-1)
# pylint: enable=E1103
return cookie
def logout(connection, cookie):
try:
connection.request('POST',
'/dynamic/logout',
headers={'Cookie': cookie},
redirect=False)
except (HTTPError, SSLError) as e:
error(str(e))
def _request_data(options):
daterange = options.daterange
params = { 'start_time': daterange.start,
'end_time': daterange.end,
'version': __version__ }
connection = connection_from_url(options.hub, timeout=8.0)
cookie = login(connection, options)
try:
r = connection.request('GET',
DATATYPE_URL[options.type] % options.project,
headers={'Cookie': cookie,
'Accept-Encoding': 'gzip'},
fields=params,
redirect=False)
except (HTTPError, SSLError) as e:
error(e)
exit(-1)
# pylint: disable=E1103
r_data = json_loads(r.data)
if r.status != 200:
error_msg = 'Wrong Hub answer.'
if r_data.get('msg', None):
error_msg += ' ' + r_data['msg']
if r.status == 403:
error_msg += ' Make sure the project you\'ve specified exists and you have access to it.'
error(error_msg)
exit(-1)
# pylint: enable=E1103
if options.verbose:
log('Data received from the hub')
log('Logging out')
logout(connection, cookie)
return r_data
def write_to_file(options, data, filename=None, output_path=None, force_overwrite=False):
if not filename:
filename = '%s-%s-%s.json' % (options.project, options.type, options.daterange.filename_str())
try:
if not output_path:
output_path = normpath(path_join(options.outputdir, filename))
if path_exists(output_path):
if options.overwrite or force_overwrite:
if not options.silent:
warning('Overwriting existing file: %s' % output_path)
elif not options.silent:
warning('Skipping existing file: %s' % output_path)
return
indentation = None
if options.indent:
indentation = 4
if isinstance(data, str):
data = json_loads(data)
with open(output_path, 'wb') as fout:
if isinstance(data, str):
fout.write(data)
else:
json_dump(data, fout, indent=indentation)
if options.verbose:
log('Finished writing to: %s' % output_path)
except (IOError, OSError) as e:
error(e)
exit(-1)
try:
# pylint: disable=F0401
from Crypto.Cipher.AES import new as aes_new, MODE_CBC
# pylint: enable=F0401
def decrypt_data(data, key):
# Need to use a key of length 32 bytes for AES-256
if len(key) != 32:
error('Invalid key length for AES-256')
exit(-1)
# IV is last 16 bytes
iv = data[-16 :]
data = data[: -16]
data = aes_new(key, MODE_CBC, iv).decrypt(data)
# Strip PKCS7 padding required for CBC
if len(data) % 16:
error('Corrupted data - invalid length')
exit(-1)
num_padding = ord(data[-1])
if num_padding > 16:
error('Corrupted data - invalid padding')
exit(-1)
return data[: -num_padding]
except ImportError:
from io import BytesIO
from subprocess import Popen, STDOUT, PIPE
from struct import pack
def decrypt_data(data, key):
# Need to use a key of length 32 bytes for AES-256
if len(key) != 32:
error('Invalid key length for AES-256')
exit(-1)
aesdata = BytesIO()
aesdata.write(key)
aesdata.write(pack('I', len(data)))
aesdata.write(data)
process = Popen('aesdecrypt', stderr=STDOUT, stdout=PIPE, stdin=PIPE, shell=True)
output, _ = process.communicate(input=aesdata.getvalue())
retcode = process.poll()
if retcode != 0:
error('Failed to run aesdecrypt, check it is on the path or install PyCrypto')
exit(-1)
return str(output)
def get_log_files_local(options, files_list, enc_key):
verbose = options.verbose
silent = options.silent
overwrite = options.overwrite
output_dir = options.outputdir
filename_prefix = options.project + '-'
try:
for filename in files_list:
if filename.startswith('http'):
error('Unexpected file to retrieve')
exit(-1)
# Format v1: 'eventlogspath/gamefolder/events-yyyy-mm-dd.json.gz'
# Format v2: 'eventlogspath/gamefolder/events-yyyy-mm-dd.bin'
# Convert to 'gameslug-events-yyyy-mm-dd.json'
filename_patched = filename_prefix + filename.rsplit('/', 1)[-1].split('.', 1)[0] + '.json'
output_path = normpath(path_join(output_dir, filename_patched))
if not overwrite and path_exists(output_path):
if not silent:
warning('Skipping existing file: %s' % output_path)
continue
if verbose:
log('Retrieving file: %s' % filename_patched)
if filename.endswith('.bin'):
with open(filename, 'rb') as fin:
file_content = fin.read()
file_content = decrypt_data(file_content, enc_key)
file_content = zlib_decompress(file_content)
else: # if filename.endswith('.json.gz'):
gzip_file = GzipFile(filename=filename, mode='rb')
file_content = gzip_file.read()
gzip_file.close()
file_content = decrypt_data(file_content, enc_key)
write_to_file(options, file_content, filename=filename_patched, output_path=output_path)
except (IOError, OSError) as e:
error(e)
exit(-1)
def get_log_files_s3(options, files_list, enc_key, connection):
verbose = options.verbose
silent = options.silent
overwrite = options.overwrite
output_dir = options.outputdir
filename_prefix = options.project + '-'
try:
for filename in files_list:
# Format v1: 'https://bucket.s3.amazonaws.com/gamefolder/events-yyyy-mm-dd.json?AWSAccessKeyId=keyid
# &Expires=timestamp&Signature=signature'
# Format v2: 'https://bucket.s3.amazonaws.com/gamefolder/events-yyyy-mm-dd.bin?AWSAccessKeyId=keyid
# &Expires=timestamp&Signature=signature'
# Convert to 'gameslug-events-yyyy-mm-dd.json'
filename_cleaned = filename.split('?', 1)[0].rsplit('/', 1)[-1]
filename_patched = filename_prefix + filename_cleaned.split('.', 1)[0] + '.json'
output_path = normpath(path_join(output_dir, filename_patched))
if not overwrite and path_exists(output_path):
if not silent:
warning('Skipping existing file: %s' % output_path)
continue
if verbose:
log('Requesting file: %s' % filename_patched)
r = connection.request('GET', filename, redirect=False)
# pylint: disable=E1103
if r.status != 200:
error_msg = 'Couldn\'t download %s.' % filename_patched
if r.data.get('msg', None):
error_msg += ' ' + r.data['msg']
error(str(r.status) + error_msg)
exit(-1)
# pylint: enable=E1103
r_data = decrypt_data(r.data, enc_key)
if filename_cleaned.endswith('.bin'):
r_data = zlib_decompress(r_data)
# Format v1 file gets uncompressed on download so we just decrypt it
write_to_file(options, r_data, filename=filename_patched, output_path=output_path)
except (HTTPError, SSLError) as e:
error(e)
exit(-1)
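# The first 8 hex digits of a MongoDB-style ObjectId encode its creation time
# as a big-endian Unix timestamp, which is all this tool needs from it.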
def get_objectid_timestamp(objectid):
return int(str(objectid)[0:8], 16)
def inline_array_events_local(options, today_log, array_files_list, enc_key):
verbose = options.verbose
to_sort = set()
try:
index = 0
for index, filename in enumerate(array_files_list):
# Format: 'eventlogspath/gamefolder/arrayevents/date(seconds)/objectid.bin'
# The objectid doesn't correspond to a database entry but is used for uniqueness and timestamp
filename = filename.replace('\\', '/')
event_objectid = filename.rsplit('/', 1)[-1].split('.', 1)[0]
timestamp = get_objectid_timestamp(event_objectid)
formatted_timestamp = strftime('%Y-%m-%d %H:%M:%S', gmtime(timestamp))
if verbose:
log('Retrieving events file ' + str(index + 1) + ' submitted at ' + formatted_timestamp)
with open(filename, 'rb') as fin:
file_content = fin.read()
file_content = decrypt_data(file_content, enc_key)
file_content = json_loads(zlib_decompress(file_content))
if not isinstance(file_content, list):
file_content = [file_content]
for event in file_content:
slug = event['slug']
del event['slug']
event['time'] = strftime('%Y-%m-%d %H:%M:%S', gmtime(event['time']))
if slug not in today_log:
today_log[slug] = { 'playEvents': [], 'customEvents': [] }
today_log[slug]['customEvents'].append(event)
            # Maintain a list of slugs whose customEvents need sorting by date, so that added
            # array events appear in order without unnecessarily sorting large lists that
            # received no array events
to_sort.add(slug)
for slug in to_sort:
today_log[slug]['customEvents'].sort(key=lambda k: k['time'])
return today_log
except (IOError, OSError) as e:
error(e)
exit(-1)
def inline_array_events_s3(options, today_log, array_files_list, enc_key, connection):
verbose = options.verbose
to_sort = set()
try:
for index, filename in enumerate(array_files_list):
# Format: 'https://bucket.s3.amazonaws.com/gamefolder/arrayevents/date(seconds)/objectid.bin?
# AWSAccessKeyId=keyid&Expires=timestamp&Signature=signature'
            # The objectid doesn't correspond to a database entry but is used for uniqueness and timestamp
filename_cleaned = filename.split('?', 1)[0].rsplit('/', 1)[-1]
event_objectid = filename_cleaned.split('.', 1)[0]
timestamp = get_objectid_timestamp(event_objectid)
formatted_timestamp = strftime('%Y-%m-%d %H:%M:%S', gmtime(timestamp))
if verbose:
log('Requesting events file ' + str(index + 1) + ' submitted at ' + formatted_timestamp)
r = connection.request('GET', filename, redirect=False)
# pylint: disable=E1103
if r.status != 200:
error_msg = 'Couldn\'t download event %d.' % (index + 1)
if r.data.get('msg', None):
error_msg += ' ' + r.data['msg']
error(str(r.status) + error_msg)
exit(-1)
# pylint: enable=E1103
r_data = decrypt_data(r.data, enc_key)
r_data = json_loads(zlib_decompress(r_data))
if not isinstance(r_data, list):
r_data = [r_data]
for event in r_data:
slug = event['slug']
del event['slug']
event['time'] = strftime('%Y-%m-%d %H:%M:%S', gmtime(event['time']))
if slug not in today_log:
today_log[slug] = { 'playEvents': [], 'customEvents': [] }
today_log[slug]['customEvents'].append(event)
            # Maintain a list of slugs whose customEvents need sorting by date, so that added
            # array events appear in order without unnecessarily sorting large lists that
            # received no array events
to_sort.add(slug)
for slug in to_sort:
today_log[slug]['customEvents'].sort(key=lambda k: k['time'])
return today_log
except (HTTPError, SSLError) as e:
error(e)
exit(-1)
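# Today's log arrives inline with the Hub response rather than as a file, so it
# is patched with any array events and written out, prompting before clobbering
# an existing file when --overwrite is not set.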
def patch_and_write_today_log(options, resp_daterange, today_log, array_files_list, enc_key, connection):
today_range = DateRange(int(resp_daterange.end / DAY) * DAY, int(resp_daterange.end))
filename = '%s-%s-%s.json' % (options.project, options.type, today_range.filename_str())
output_path = normpath(path_join(options.outputdir, filename))
if not options.overwrite and path_exists(output_path):
if not options.silent:
# Confirm skip as does not make sense to request today's data just to skip overwriting it locally
log('Overwriting is disabled. Are you sure you want to skip overwriting today\'s downloaded log? ' \
'(Press \'y\' to skip or \'n\' to overwrite)')
skip_options = ['y', 'n']
for attempt in xrange(1, 4): # default to skip after three bad attempts
log('', new_line=False)
skip = stdin.readline().strip().lower()
if skip in skip_options:
break
error('Please answer with \'y\' or \'n\'. (Attempt %d of 3)' % attempt)
if 'n' != skip:
warning('Skipping overwriting today\'s downloaded file: %s' % output_path)
return
else:
warning('Overwrite disabled but overwriting today\'s downloaded file: %s' % output_path)
else: # Do not ask in silent mode, default to the option passed
return
if array_files_list:
if options.verbose:
log('Patching today\'s log file to include array events')
if connection:
today_log = inline_array_events_s3(options, today_log, array_files_list, enc_key, connection)
else:
today_log = inline_array_events_local(options, today_log, array_files_list, enc_key)
write_to_file(options, today_log, filename=filename, output_path=output_path, force_overwrite=True)
# pylint: disable=E1103
def main():
options = _parse_args()
silent = options.silent
if not silent:
log('Downloading \'%s\' to %s.' % (options.type, options.outputdir or 'current directory'))
try:
r_data = _request_data(options)
try:
response_daterange = DateRange(r_data['start_time'], r_data['end_time'])
datatype = options.type
if 'users' == datatype:
user_data = r_data['user_data']
else: # if 'events' == datatype
logs_url = r_data['logs_url']
files_list = r_data['files_list']
array_files_list = r_data['array_files_list']
enc_key = r_data['key']
if enc_key is not None:
# enc_key can be a unicode string and we need a stream of ascii bytes
enc_key = urlsafe_b64decode(enc_key.encode('ascii'))
today_log = r_data['today_log']
except KeyError as e:
error('Missing information in response: %s' % e)
exit(-1)
del r_data
daterange = options.daterange
if not silent:
if response_daterange.start != daterange.start:
warning('Start date used (%s) not the same as what was specified (%s)' % \
(response_daterange.start_str, daterange.start_str))
if response_daterange.end != daterange.end:
warning('End date used (%s) not the same as what was specified (%s)' % \
(response_daterange.end_str, daterange.end_str))
options.daterange = response_daterange
output_dir = options.outputdir
if output_dir and not path_exists(output_dir):
            # Not allowing creation of nested directories, as that invites typos and misplaced files
mkdir(output_dir)
if 'users' == datatype:
write_to_file(options, user_data)
else: # if 'events' == datatype
connection = None
if logs_url and (files_list or array_files_list):
connection = connection_from_url(logs_url, timeout=8.0)
if files_list:
if logs_url:
get_log_files_s3(options, files_list, enc_key, connection)
else:
get_log_files_local(options, files_list, enc_key)
del files_list
if response_daterange.end > TODAY_START:
# Patch and write, if requested, today's log with the array events downloaded and inlined
patch_and_write_today_log(options, response_daterange, today_log, array_files_list, enc_key, connection)
del today_log
del array_files_list
if not silent:
log('Export completed successfully')
except KeyboardInterrupt:
if not silent:
warning('Program stopped by user')
exit(-1)
except OSError as e:
error(str(e))
exit(-1)
except Exception as e:
error(str(e))
exit(-1)
return 0
# pylint: enable=E1103
if __name__ == "__main__":
exit(main())<|fim▁end|> | parser.add_argument("-w", "--overwrite", action="store_true",
help="if a file to be downloaded exists in the output directory, " \
"overwrite instead of skipping it")
parser.add_argument("--indent", action="store_true", help="apply indentation to the JSON output") |
<|file_name|>advanced.js<|end_file_name|><|fim▁begin|>var Notify = require('../');
var notifier = new Notify();
notifier.notify({
"title": "Phil Coulson",
"subtitle": "Agent of S.H.I.E.L.D.",
"message": "If I come out, will you shoot me? 'Cause then I won't come out.",
"sound": "Funk", // case sensitive
"appIcon": __dirname + "/coulson.jpg",<|fim▁hole|>});
setTimeout(function() {
console.log("Done");
}, 5000);<|fim▁end|> | "contentImage": __dirname + "/coulson.jpg",
"open": "file://" + __dirname + "/coulson.jpg" |
<|file_name|>upgrade.rs<|end_file_name|><|fim▁begin|>use header::{Header, HeaderFormat};
use std::fmt;
use std::str::FromStr;
use header::parsing::{from_comma_delimited, fmt_comma_delimited};
use unicase::UniCase;
use self::Protocol::{WebSocket, ProtocolExt};
/// The `Upgrade` header.
#[derive(Clone, PartialEq, Debug)]
pub struct Upgrade(pub Vec<Protocol>);
deref!(Upgrade => Vec<Protocol>);
/// Protocol values that can appear in the Upgrade header.
#[derive(Clone, PartialEq, Debug)]
pub enum Protocol {
/// The websocket protocol.
WebSocket,
/// Some other less common protocol.
ProtocolExt(String),
}
<|fim▁hole|>impl FromStr for Protocol {
type Err = ();
fn from_str(s: &str) -> Result<Protocol, ()> {
if UniCase(s) == UniCase("websocket") {
Ok(WebSocket)
}
else {
Ok(ProtocolExt(s.to_string()))
}
}
}
impl fmt::Display for Protocol {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{}", match *self {
WebSocket => "websocket",
ProtocolExt(ref s) => s.as_ref()
})
}
}
impl Header for Upgrade {
fn header_name() -> &'static str {
"Upgrade"
}
fn parse_header(raw: &[Vec<u8>]) -> Option<Upgrade> {
from_comma_delimited(raw).map(|vec| Upgrade(vec))
}
}
impl HeaderFormat for Upgrade {
fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let Upgrade(ref parts) = *self;
fmt_comma_delimited(fmt, &parts[..])
}
}
bench_header!(bench, Upgrade, { vec![b"HTTP/2.0, RTA/x11, websocket".to_vec()] });<|fim▁end|> | |
<|file_name|>Controlador_Conductor.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.<|fim▁hole|>import proyectomio.accesoDatos.Controlador_BD;
import proyectomio.modelo.Consulta;
/**
*
* @author root
*/
public class Controlador_Conductor {
private final Controlador_BD CONTROLADOR_BD = new Controlador_BD();
public Consulta consultar_buses_asignados(int id_conductor) {
if (id_conductor != 0) {
Consulta consulta = new Consulta();
consulta = CONTROLADOR_BD.consultarBD("SELECT * FROM bus_empleado inner join bus on bus_empleado.placa_bus = bus.placa inner join ruta on ruta.id_ruta = bus.id_ruta where bus_empleado.id_empleado = " + id_conductor);
return consulta;
}
else{
Consulta consulta = new Consulta();
consulta = CONTROLADOR_BD.consultarBD("SELECT * FROM (bus_empleado inner join bus on bus_empleado.placa_bus = bus.placa) inner join ruta on ruta.id_ruta = bus.id_ruta;");
return consulta;
}
}
}<|fim▁end|> | */
package proyectomio.controlador.operaciones;
|
<|file_name|>action.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Inforevealer
# Copyright (C) 2010 Francois Boulogne <fboulogne at april dot org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import io, readconf, getinfo, pastebin
import os, sys, gettext,string, pexpect,getpass
gettext.textdomain('inforevealer')
_ = gettext.gettext
__version__="0.5.1"
def askYesNo(question,default='y'):
""" Yes/no question throught a console """
if string.lower(default) == 'y':
question = question + " [Y/n]"
else:
question = question + " [y/N]"
ret = string.lower(raw_input(question))
if ret == 'y' or ret == "":
answer=True
else:
answer=False
return answer
def RunAs(category_info,gui=False):
""" Check if root is needed, if user want to be root... """
if gui: from gui import yesNoDialog
run_as='user'
if os.getuid() == 0:
#we are root
run_as='root'
else:
#check if root is needed
root_needed=False
for i in category_info:
if i.root:
root_needed=True
break
if root_needed:
            #ask if the user wants to substitute user
question=_("""To generate a complete report, root access is needed.
Do you want to substitute user?""")
if gui:
#substitute=yesNoDialog(question=question)
                substitute=True #It seems more comfortable to remove the question
else:
#substitute=askYesNo(question)
                substitute=True #It seems more comfortable to remove the question
if substitute:
run_as="substitute"
else:
run_as="user"
else:
run_as='user'
return run_as<|fim▁hole|> """Run a new instance of inforevealer with root priviledge to complete tmp_configfile"""
if gui: from gui import askPassword
if run_as == "substitute":
#find the substitute user command and run the script
if pexpect.which('su') != None:
message=_("Please, enter the root password.")
root_instance = str(pexpect.which('su')) + " - -c \'"+ os.path.abspath(sys.argv[0])+" --runfile "+ tmp_configfile+"\'"
elif pexpect.which('sudo') != None: #TODO checkme
message=_("Please, enter your user password.")
root_instance = str(pexpect.which('sudo')) + ' ' + os.path.abspath(sys.argv[0])+' --runfile '+ tmp_configfile
else:
sys.stderr.write(_("Error: No substitute user command available.\n"))
return 1
ret=""
count=0
while ret!=[' \r\n'] and count <3:
#Get password
count+=1
if gui:
password=askPassword(question=message)
else:
print(message)
password=getpass.getpass()
if password != False: #askPassword could return False
#Run the command #TODO exceptions ?
child = pexpect.spawn(root_instance)
ret=child.expect([".*:",pexpect.EOF]) #Could we do more ?
child.sendline(password)
ret = child.readlines()
if ret ==[' \r\n']: return 0
message=_("Wrong password.\nThe log will be generated without root priviledge.")
if gui:
import gtk
md = gtk.MessageDialog(None, gtk.DIALOG_DESTROY_WITH_PARENT, gtk.MESSAGE_INFO, gtk.BUTTONS_CLOSE, message)
md.set_title(_("Error"))
md.run()
md.destroy()
else:
print(message)
def action(category,dumpfile,configfile,tmp_configfile,verbosity,gui=False):
if gui: from gui import yesNoDialog
#####################
# Write in dumpfile
#####################
dumpfile_handler= open(dumpfile,'w')
io.print_write_header(dumpfile_handler)
dumpfile_handler.write('Category: '+ category+'\n')
category_info = readconf.LoadCategoryInfo(configfile,category)
#need/want to run commands as...
run_as = RunAs(category_info,gui)
#detect which distribution the user uses
linux_distrib=getinfo.General_info(dumpfile_handler)
# In the case of run_as='substitute'
# a configuration file is generated
# su/sudo is used to run a new instance of inforevealer in append mode
# to complete the report
tmp_configfile_handler= open(tmp_configfile,'w')
for i in category_info:
i.write(linux_distrib,verbosity,dumpfile_handler,dumpfile,run_as,tmp_configfile_handler)
tmp_configfile_handler.close()
#Use su or sudo to complete the report
dumpfile_handler.close() #the next function will modify the report, close the dumpfile
CompleteReportAsRoot(run_as,tmp_configfile,gui)
# Message to close the report
dumpfile_handler= open(dumpfile,'a')
io.write_title("You didn\'t find what you expected?",dumpfile_handler)
dumpfile_handler.write( 'Please, open a bug report on\nhttp://github.com/sciunto/inforevealer\n')
dumpfile_handler.close()
print( _("The output has been dumped in %s") %dumpfile)<|fim▁end|> |
def CompleteReportAsRoot(run_as,tmp_configfile,gui=False): |
<|file_name|>event.rs<|end_file_name|><|fim▁begin|>use std::ops::Deref;
use std::any::Any;
use glium::Display;
use timer::Ms;
use render::Renderer;
pub use glium::glutin::Event as WindowEvent;
pub enum Event {
Window (WindowEvent),
Message (String),
Something (String, Box<Any>),
}
pub struct EventStream (Vec<Event>);
impl EventStream {
pub fn new(display: &Display) -> EventStream {
use glium::glutin::Event::MouseMoved;
let f = display.get_window().unwrap().hidpi_factor();
let (w, h) = display.get_framebuffer_dimensions();
let (w, h) = (w as f32, h as f32);
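        // Re-center mouse coordinates on the framebuffer midpoint (y pointing up)
        // and divide by the hidpi factor to get logical, origin-centered units.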
let events: Vec<_> = display.poll_events().map(|event| match event {
MouseMoved ((x, y)) => {
let (x, y) = (x as f32, y as f32);
MouseMoved((((x - w/2.0)/f) as i32, (-(y - h/2.0)/f) as i32))
}
x => x
}).map(|e| Event::Window(e)).collect();
EventStream(events)
}
}
impl Deref for EventStream {
type Target = Vec<Event>;
fn deref<'a>(&'a self) -> &'a Vec<Event> {
let &EventStream (ref x) = self;
return x;
}
}
pub trait Update {
fn update(&mut self, renderer: &Renderer, delta: Ms, stream: EventStream) -> EventStream;
}
impl<'a> Update for Vec<&'a mut Update> {
fn update(&mut self, renderer: &Renderer, delta: Ms, mut stream: EventStream)
-> EventStream
{
for item in self {
stream = item.update(renderer, delta, stream);
}
return stream;
}<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Copyright 2013, 2014 Canonical Ltd.
// Licensed under the LGPLv3, see LICENCE file for details.
/*
[godoc-link-here]
The juju/errors package provides an easy way to annotate errors without losing the
original error context.
The exported `New` and `Errorf` functions are designed to replace the
`errors.New` and `fmt.Errorf` functions respectively. The same underlying
error is there, but the package also records the location at which the error
was created.
A primary use case for this library is to add extra context any time an
error is returned from a function.
if err := SomeFunc(); err != nil {
return err
}
This instead becomes:
if err := SomeFunc(); err != nil {
return errors.Trace(err)
}
which just records the file and line number of the Trace call, or
if err := SomeFunc(); err != nil {
return errors.Annotate(err, "more context")
}
which also adds an annotation to the error.
When you want to check to see if an error is of a particular type, a helper
function is normally exported by the package that returned the error, like the
`os` package does. The underlying cause of the error is available using the
`Cause` function.
os.IsNotExist(errors.Cause(err))
The result of the `Error()` call on an annotated error is the annotations joined
with colons, then the result of the `Error()` method for the underlying error
that was the cause.
err := errors.Errorf("original")
err = errors.Annotatef(err, "context")
err = errors.Annotatef(err, "more context")
err.Error() -> "more context: context: original"
Obviously recording the file, line and functions is not very useful if you
cannot get them back out again.
errors.ErrorStack(err)
will return something like:
first error
yougam/libraries/juju/errors/annotation_test.go:193:
yougam/libraries/juju/errors/annotation_test.go:194: annotation
yougam/libraries/juju/errors/annotation_test.go:195:
yougam/libraries/juju/errors/annotation_test.go:196: more context<|fim▁hole|>
The first error was generated by an external system, so there was no location
associated. The second, fourth, and last lines were generated with Trace calls,
and the other two through Annotate.
Sometimes when responding to an error you want to return a more specific error
for the situation.
if err := FindField(field); err != nil {
return errors.Wrap(err, errors.NotFoundf(field))
}
This returns an error where the complete error stack is still available, and
`errors.Cause()` will return the `NotFound` error.
*/
package errors<|fim▁end|> | yougam/libraries/juju/errors/annotation_test.go:197: |
<|file_name|>pt.js<|end_file_name|><|fim▁begin|><|fim▁hole|>CKEDITOR.plugins.setLang( 'pagebreak', 'pt', {
alt: 'Quebra de página',
toolbar: 'Inserir quebra de página'
} );<|fim▁end|> | /*
Copyright (c) 2003-2019, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/ |
<|file_name|>prf.py<|end_file_name|><|fim▁begin|># This file is part of Scapy
# Copyright (C) 2007, 2008, 2009 Arnaud Ebalard
# 2015, 2016, 2017 Maxence Tury
# This program is published under a GPLv2 license
"""
TLS Pseudorandom Function.
"""
from __future__ import absolute_import
from scapy.error import warning
from scapy.utils import strxor
from scapy.layers.tls.crypto.hash import _tls_hash_algs
from scapy.layers.tls.crypto.h_mac import _tls_hmac_algs
from scapy.modules.six.moves import range
from scapy.compat import bytes_encode
# Data expansion functions
def _tls_P_hash(secret, seed, req_len, hm):
"""
Provides the implementation of P_hash function defined in
section 5 of RFC 4346 (and section 5 of RFC 5246). Two
parameters have been added (hm and req_len):
- secret : the key to be used. If RFC 4868 is to be believed,
the length must match hm.key_len. Actually,
python hmac takes care of formatting every key.
- seed : the seed to be used.
- req_len : the length of data to be generated by iterating
the specific HMAC function (hm). This prevents
multiple calls to the function.
- hm : the hmac function class to use for iteration (either
Hmac_MD5 or Hmac_SHA1 in TLS <= 1.1 or
Hmac_SHA256 or Hmac_SHA384 in TLS 1.2)
"""
hash_len = hm.hash_alg.hash_len
n = (req_len + hash_len - 1) // hash_len
seed = bytes_encode(seed)
res = b""
a = hm(secret).digest(seed) # A(1)
while n > 0:
res += hm(secret).digest(a + seed)
a = hm(secret).digest(a)
n -= 1
return res[:req_len]
def _tls_P_MD5(secret, seed, req_len):
return _tls_P_hash(secret, seed, req_len, _tls_hmac_algs["HMAC-MD5"])
def _tls_P_SHA1(secret, seed, req_len):
return _tls_P_hash(secret, seed, req_len, _tls_hmac_algs["HMAC-SHA"])
def _tls_P_SHA256(secret, seed, req_len):
return _tls_P_hash(secret, seed, req_len, _tls_hmac_algs["HMAC-SHA256"])
def _tls_P_SHA384(secret, seed, req_len):
return _tls_P_hash(secret, seed, req_len, _tls_hmac_algs["HMAC-SHA384"])
def _tls_P_SHA512(secret, seed, req_len):
return _tls_P_hash(secret, seed, req_len, _tls_hmac_algs["HMAC-SHA512"])
# PRF functions, according to the protocol version
def _sslv2_PRF(secret, seed, req_len):
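    """
    SSLv2 key material derivation: a single MD5(secret || seed) when one
    round suffices, otherwise MD5(secret || str(round) || seed) per round
    (round starting at 0), concatenated and truncated to req_len.
    """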
hash_md5 = _tls_hash_algs["MD5"]()
rounds = (req_len + hash_md5.hash_len - 1) // hash_md5.hash_len
res = b""
if rounds == 1:
res += hash_md5.digest(secret + seed)
else:
r = 0
while r < rounds:
label = str(r).encode("utf8")
res += hash_md5.digest(secret + label + seed)
r += 1
return res[:req_len]
def _ssl_PRF(secret, seed, req_len):
"""
Provides the implementation of SSLv3 PRF function:
SSLv3-PRF(secret, seed) =
MD5(secret || SHA-1("A" || secret || seed)) ||
MD5(secret || SHA-1("BB" || secret || seed)) ||
MD5(secret || SHA-1("CCC" || secret || seed)) || ...
req_len should not be more than 26 x 16 = 416.
"""
if req_len > 416:
warning("_ssl_PRF() is not expected to provide more than 416 bytes")
return ""
d = [b"A", b"B", b"C", b"D", b"E", b"F", b"G", b"H", b"I", b"J", b"K", b"L", # noqa: E501
b"M", b"N", b"O", b"P", b"Q", b"R", b"S", b"T", b"U", b"V", b"W", b"X", # noqa: E501
b"Y", b"Z"]
res = b""
hash_sha1 = _tls_hash_algs["SHA"]()
hash_md5 = _tls_hash_algs["MD5"]()
rounds = (req_len + hash_md5.hash_len - 1) // hash_md5.hash_len
for i in range(rounds):
label = d[i] * (i + 1)
tmp = hash_sha1.digest(label + secret + seed)
res += hash_md5.digest(secret + tmp)
return res[:req_len]
def _tls_PRF(secret, label, seed, req_len):
"""
Provides the implementation of TLS PRF function as defined in
section 5 of RFC 4346:
PRF(secret, label, seed) = P_MD5(S1, label + seed) XOR
P_SHA-1(S2, label + seed)
Parameters are:
- secret: the secret used by the HMAC in the 2 expansion
functions (S1 and S2 are the halves of this secret).
- label: specific label as defined in various sections of the RFC
depending on the use of the generated PRF keystream
- seed: the seed used by the expansion functions.
- req_len: amount of keystream to be generated
"""
tmp_len = (len(secret) + 1) // 2
S1 = secret[:tmp_len]
S2 = secret[-tmp_len:]
a1 = _tls_P_MD5(S1, label + seed, req_len)
a2 = _tls_P_SHA1(S2, label + seed, req_len)
return strxor(a1, a2)
def _tls12_SHA256PRF(secret, label, seed, req_len):
"""
Provides the implementation of TLS 1.2 PRF function as
defined in section 5 of RFC 5246:
PRF(secret, label, seed) = P_SHA256(secret, label + seed)
Parameters are:
- secret: the secret used by the HMAC in the 2 expansion
functions (S1 and S2 are the halves of this secret).
- label: specific label as defined in various sections of the RFC
depending on the use of the generated PRF keystream
- seed: the seed used by the expansion functions.
- req_len: amount of keystream to be generated
"""
return _tls_P_SHA256(secret, label + seed, req_len)
def _tls12_SHA384PRF(secret, label, seed, req_len):
return _tls_P_SHA384(secret, label + seed, req_len)
def _tls12_SHA512PRF(secret, label, seed, req_len):
return _tls_P_SHA512(secret, label + seed, req_len)
class PRF(object):
"""
The PRF used by SSL/TLS varies based on the version of the protocol and
(for TLS 1.2) possibly the Hash algorithm of the negotiated cipher suite.
The various uses of the PRF (key derivation, computation of verify_data,
computation of pre_master_secret values) for the different versions of the
protocol also changes. In order to abstract those elements, the common
_tls_PRF() object is provided. It is expected to be initialised in the
context of the connection state using the tls_version and the cipher suite.
"""
def __init__(self, hash_name="SHA256", tls_version=0x0303):
self.tls_version = tls_version
self.hash_name = hash_name
if tls_version < 0x0300: # SSLv2
self.prf = _sslv2_PRF
elif tls_version == 0x0300: # SSLv3
self.prf = _ssl_PRF
elif (tls_version == 0x0301 or # TLS 1.0
tls_version == 0x0302): # TLS 1.1
self.prf = _tls_PRF
elif tls_version == 0x0303: # TLS 1.2
if hash_name == "SHA384":
self.prf = _tls12_SHA384PRF
elif hash_name == "SHA512":
self.prf = _tls12_SHA512PRF
else:
if hash_name in ["MD5", "SHA"]:
self.hash_name = "SHA256"
self.prf = _tls12_SHA256PRF
else:
warning("Unknown TLS version")
def compute_master_secret(self, pre_master_secret, client_random,
server_random, extms=False, handshake_hash=None):
"""
Return the 48-byte master_secret, computed from pre_master_secret,
client_random and server_random. See RFC 5246, section 6.3.
Supports Extended Master Secret Derivation, see RFC 7627
"""
seed = client_random + server_random
label = b'master secret'
if extms is True and handshake_hash is not None:
seed = handshake_hash
label = b'extended master secret'
if self.tls_version < 0x0300:
return None
elif self.tls_version == 0x0300:
return self.prf(pre_master_secret, seed, 48)
else:
return self.prf(pre_master_secret, label, seed, 48)
def derive_key_block(self, master_secret, server_random,
client_random, req_len):
"""
Perform the derivation of master_secret into a key_block of req_len
requested length. See RFC 5246, section 6.3.
"""
seed = server_random + client_random
if self.tls_version <= 0x0300:
return self.prf(master_secret, seed, req_len)
else:
return self.prf(master_secret, b"key expansion", seed, req_len)
def compute_verify_data(self, con_end, read_or_write,
handshake_msg, master_secret):
"""
Return verify_data based on handshake messages, connection end,
master secret, and read_or_write position. See RFC 5246, section 7.4.9.
Every TLS 1.2 cipher suite has a verify_data of length 12. Note also::
"This PRF with the SHA-256 hash function is used for all cipher
suites defined in this document and in TLS documents published
prior to this document when TLS 1.2 is negotiated."
Cipher suites using SHA-384 were defined later on.
"""
if self.tls_version < 0x0300:
return None
elif self.tls_version == 0x0300:
if read_or_write == "write":
d = {"client": b"CLNT", "server": b"SRVR"}
else:
d = {"client": b"SRVR", "server": b"CLNT"}
label = d[con_end]
sslv3_md5_pad1 = b"\x36" * 48
sslv3_md5_pad2 = b"\x5c" * 48
sslv3_sha1_pad1 = b"\x36" * 40
sslv3_sha1_pad2 = b"\x5c" * 40<|fim▁hole|>
md5_hash = md5.digest(master_secret + sslv3_md5_pad2 +
md5.digest(handshake_msg + label +
master_secret + sslv3_md5_pad1))
sha1_hash = sha1.digest(master_secret + sslv3_sha1_pad2 +
sha1.digest(handshake_msg + label +
master_secret + sslv3_sha1_pad1)) # noqa: E501
verify_data = md5_hash + sha1_hash
else:
if read_or_write == "write":
d = {"client": "client", "server": "server"}
else:
d = {"client": "server", "server": "client"}
label = ("%s finished" % d[con_end]).encode()
if self.tls_version <= 0x0302:
s1 = _tls_hash_algs["MD5"]().digest(handshake_msg)
s2 = _tls_hash_algs["SHA"]().digest(handshake_msg)
verify_data = self.prf(master_secret, label, s1 + s2, 12)
else:
h = _tls_hash_algs[self.hash_name]()
s = h.digest(handshake_msg)
verify_data = self.prf(master_secret, label, s, 12)
return verify_data
def postprocess_key_for_export(self, key, client_random, server_random,
con_end, read_or_write, req_len):
"""
Postprocess cipher key for EXPORT ciphersuite, i.e. weakens it.
An export key generation example is given in section 6.3.1 of RFC 2246.
See also page 86 of EKR's book.
"""
s = con_end + read_or_write
s = (s == "clientwrite" or s == "serverread")
if self.tls_version < 0x0300:
return None
elif self.tls_version == 0x0300:
if s:
tbh = key + client_random + server_random
else:
tbh = key + server_random + client_random
export_key = _tls_hash_algs["MD5"]().digest(tbh)[:req_len]
else:
if s:
tag = b"client write key"
else:
tag = b"server write key"
export_key = self.prf(key,
tag,
client_random + server_random,
req_len)
return export_key
def generate_iv_for_export(self, client_random, server_random,
con_end, read_or_write, req_len):
"""
Generate IV for EXPORT ciphersuite, i.e. weakens it.
An export IV generation example is given in section 6.3.1 of RFC 2246.
See also page 86 of EKR's book.
"""
s = con_end + read_or_write
s = (s == "clientwrite" or s == "serverread")
if self.tls_version < 0x0300:
return None
elif self.tls_version == 0x0300:
if s:
tbh = client_random + server_random
else:
tbh = server_random + client_random
iv = _tls_hash_algs["MD5"]().digest(tbh)[:req_len]
else:
iv_block = self.prf("",
b"IV block",
client_random + server_random,
2 * req_len)
if s:
iv = iv_block[:req_len]
else:
iv = iv_block[req_len:]
return iv<|fim▁end|> |
md5 = _tls_hash_algs["MD5"]()
sha1 = _tls_hash_algs["SHA"]() |
<|file_name|>endpoint-manager.ts<|end_file_name|><|fim▁begin|>import {autoinject, bindable, child, children} from 'aurelia-framework';
import {CoreApiService} from '../../services/api/core-api-service';
import {ApiForm} from '../../services/api/form/api-form';
import {CoreApiModel} from '../../services/api/models/core-api-model';
import Toast from '../../services/helpers/toast';
import {Column} from './column';
@autoinject
export class EndpointManager {
records: any[] = [];
selectedRecords: any[] = [];
@children('column') columns: Column[];
@bindable apiService: CoreApiService;
<|fim▁hole|> @child('[slot=delete-success-message]') deleteSuccessMessage;
@child('[slot=delete-error-message]') deleteErrorMessage;
protected defaultDeleteSuccessMessage: string = 'Record deleted successfully';
protected defaultDeleteErrorMessage: string = 'There was an error deleting the record';
constructor(protected toast: Toast) {}
apiServiceChanged() {
this.getAllRecords();
}
getAllRecords() {
this.apiService.getAll()
.then((records) => this.records = records);
}
deleteSelectedRecords() {
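    // Issue one DELETE per selected record, then refresh the list whether or not every delete succeeded.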
let promises = this.selectedRecords.map((record) => {
return this.apiService.delete(record.id);
});
Promise.all(promises)
.then(() => this.displayDeleteSuccessMessage())
.then(() => this.selectedRecords = [])
.then(null, () => this.displayDeleteErrorMessage())
.then(() => this.getAllRecords());
}
displayDeleteSuccessMessage() {
let message = this.deleteSuccessMessage
&& this.deleteSuccessMessage.innerText
|| this.defaultDeleteSuccessMessage;
this.toast.success(`
<i class="fa fa-check"></i>
${message}
`);
}
displayDeleteErrorMessage() {
let message = this.deleteErrorMessage
&& this.deleteErrorMessage.innerText
|| this.defaultDeleteErrorMessage;
this.toast.error(`
<i class="fa fa-check"></i>
      ${message}
`);
}
}<|fim▁end|> | @child('[slot=form]') formControl: ApiForm<CoreApiModel>; |
<|file_name|>jizhidezy.py<|end_file_name|><|fim▁begin|>#coding=utf-8
from PIL import Image  # requires the Pillow library
import glob, os
in_dir = 'background'  # source image directory
out_dir = in_dir + '_out'  # output directory for converted images
if not os.path.exists(out_dir): os.mkdir(out_dir)
<|fim▁hole|>def main():
for files in glob.glob(in_dir+'/*'):
filepath,filename = os.path.split(files)
im = Image.open(files)
w,h = im.size
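        # Scale to a fixed 1920 px width, preserving the aspect ratio.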
im = im.resize((int(1920), int(1.0*h/w*1920)))
im.save(os.path.join(out_dir,filename))
if __name__=='__main__':
main()<|fim▁end|> | #图片批处理 |
<|file_name|>nailgun_client.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import errno
import logging
import os
import signal
import socket
import sys
from pants.java.nailgun_io import NailgunStreamWriter
from pants.java.nailgun_protocol import ChunkType, NailgunProtocol
from pants.util.socket import RecvBufferedSocket
logger = logging.getLogger(__name__)
class NailgunClientSession(NailgunProtocol):
"""Handles a single nailgun client session."""
def __init__(self, sock, in_fd, out_fd, err_fd, exit_on_broken_pipe=False):
self._sock = sock
if in_fd:
self._input_writer = NailgunStreamWriter(in_fd, self._sock,
ChunkType.STDIN, ChunkType.STDIN_EOF)
else:
self._input_writer = None
self._stdout = out_fd
self._stderr = err_fd
self._exit_on_broken_pipe = exit_on_broken_pipe
self.remote_pid = None
def _maybe_start_input_writer(self):
if self._input_writer:
self._input_writer.start()
def _maybe_stop_input_writer(self):
if self._input_writer:
self._input_writer.stop()
def _write_flush(self, fd, payload=None):
"""Write a payload to a given fd (if provided) and flush the fd."""
try:
if payload:
fd.write(payload)
fd.flush()
except (IOError, OSError) as e:
# If a `Broken Pipe` is encountered during a stdio fd write, we're headless - bail.
if e.errno == errno.EPIPE and self._exit_on_broken_pipe:
sys.exit()
# Otherwise, re-raise.
raise
def _process_session(self):
"""Process the outputs of the nailgun session."""<|fim▁hole|> for chunk_type, payload in self.iter_chunks(self._sock, return_bytes=True):
if chunk_type == ChunkType.STDOUT:
self._write_flush(self._stdout, payload)
elif chunk_type == ChunkType.STDERR:
self._write_flush(self._stderr, payload)
elif chunk_type == ChunkType.EXIT:
self._write_flush(self._stdout)
self._write_flush(self._stderr)
return int(payload)
elif chunk_type == ChunkType.PID:
self.remote_pid = int(payload)
elif chunk_type == ChunkType.START_READING_INPUT:
self._maybe_start_input_writer()
else:
raise self.ProtocolError('received unexpected chunk {} -> {}'.format(chunk_type, payload))
finally:
# Bad chunk types received from the server can throw NailgunProtocol.ProtocolError in
# NailgunProtocol.iter_chunks(). This ensures the NailgunStreamWriter is always stopped.
self._maybe_stop_input_writer()
def execute(self, working_dir, main_class, *arguments, **environment):
# Send the nailgun request.
self.send_request(self._sock, working_dir, main_class, *arguments, **environment)
# Process the remainder of the nailgun session.
return self._process_session()
class NailgunClient(object):
"""A python nailgun client (see http://martiansoftware.com/nailgun for more info)."""
class NailgunError(Exception):
"""Indicates an error interacting with a nailgun server."""
class NailgunConnectionError(NailgunError):
"""Indicates an error upon initial connect to the nailgun server."""
# For backwards compatibility with nails expecting the ng c client special env vars.
ENV_DEFAULTS = dict(NAILGUN_FILESEPARATOR=os.sep, NAILGUN_PATHSEPARATOR=os.pathsep)
DEFAULT_NG_HOST = '127.0.0.1'
DEFAULT_NG_PORT = 2113
def __init__(self, host=DEFAULT_NG_HOST, port=DEFAULT_NG_PORT, ins=sys.stdin, out=None, err=None,
workdir=None, exit_on_broken_pipe=False):
"""Creates a nailgun client that can be used to issue zero or more nailgun commands.
:param string host: the nailgun server to contact (defaults to '127.0.0.1')
:param int port: the port the nailgun server is listening on (defaults to the default nailgun
port: 2113)
:param file ins: a file to read command standard input from (defaults to stdin) - can be None
in which case no input is read
:param file out: a stream to write command standard output to (defaults to stdout)
:param file err: a stream to write command standard error to (defaults to stderr)
:param string workdir: the default working directory for all nailgun commands (defaults to CWD)
:param bool exit_on_broken_pipe: whether or not to exit when `Broken Pipe` errors are encountered.
"""
self._host = host
self._port = port
self._stdin = ins
self._stdout = out or sys.stdout
self._stderr = err or sys.stderr
self._workdir = workdir or os.path.abspath(os.path.curdir)
self._exit_on_broken_pipe = exit_on_broken_pipe
self._session = None
def try_connect(self):
"""Creates a socket, connects it to the nailgun and returns the connected socket.
:returns: a connected `socket.socket`.
:raises: `NailgunClient.NailgunConnectionError` on failure to connect.
"""
sock = RecvBufferedSocket(socket.socket(socket.AF_INET, socket.SOCK_STREAM))
try:
sock.connect((self._host, self._port))
except (socket.error, socket.gaierror) as e:
logger.debug('Encountered socket exception {!r} when attempting connect to nailgun'.format(e))
sock.close()
raise self.NailgunConnectionError(
'Problem connecting to nailgun server at {}:{}: {!r}'.format(self._host, self._port, e))
else:
return sock
def send_control_c(self):
"""Sends SIGINT to a nailgun server using pid information from the active session."""
if self._session and self._session.remote_pid is not None:
os.kill(self._session.remote_pid, signal.SIGINT)
def execute(self, main_class, cwd=None, *args, **environment):
"""Executes the given main_class with any supplied args in the given environment.
:param string main_class: the fully qualified class name of the main entrypoint
:param string cwd: Set the working directory for this command
:param list args: any arguments to pass to the main entrypoint
:param dict environment: an env mapping made available to native nails via the nail context
:returns: the exit code of the main_class.
"""
environment = dict(self.ENV_DEFAULTS.items() + environment.items())
cwd = cwd or self._workdir
# N.B. This can throw NailgunConnectionError (catchable via NailgunError).
sock = self.try_connect()
self._session = NailgunClientSession(sock,
self._stdin,
self._stdout,
self._stderr,
self._exit_on_broken_pipe)
try:
return self._session.execute(cwd, main_class, *args, **environment)
except socket.error as e:
raise self.NailgunError('Problem communicating with nailgun server at {}:{}: {!r}'
.format(self._host, self._port, e))
except NailgunProtocol.ProtocolError as e:
raise self.NailgunError('Problem in nailgun protocol with nailgun server at {}:{}: {!r}'
.format(self._host, self._port, e))
finally:
sock.close()
self._session = None
def __repr__(self):
return 'NailgunClient(host={!r}, port={!r}, workdir={!r})'.format(self._host,
self._port,
self._workdir)<|fim▁end|> | try: |
<|file_name|>mqtt.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016
"""
Publish and subscribe to MQTT messages.
Additional information at http://mqtt.org and
http://ibmstreams.github.io/streamsx.messaging
"""
from future.builtins import *
from streamsx.topology.topology import *
from streamsx.topology import schema
class MqttStreams(object):
"""
    A simple connector to an MQTT broker for publishing
string tuples to MQTT topics, and
subscribing to MQTT topics and creating streams.<|fim▁hole|>
    A connector is for a specific MQTT broker, as specified in
    the configuration object config. Any number of publish() and subscribe()
    connections may be created from a single MqttStreams connector.
Sample use:
::
topo = Topology("An MQTT application")
# define configuration information
config = {}
config['clientID'] = "test_MQTTpublishClient"
config['qos'] = int("1") #(needs to be int vs long)
        config['keepAliveInterval'] = int(20) # (needs to be int vs long)
        config['commandTimeout'] = 30000 # (needs to be int vs long)
        config['period'] = 5000 # (needs to be int vs long)
        config['messageQueueSize'] = 10 # (needs to be int vs long)
config['reconnectionBound'] = int(20)
config['retain'] = True
config['password'] = "foobar"
config['trustStore'] = "/tmp/no-such-trustStore"
config['trustStorePassword'] = "woohoo"
config['keyStore'] = "/tmp/no-such-keyStore"
config['keyStorePassword'] = "woohoo"
# create the connector's configuration property map
config['serverURI'] = "tcp://localhost:1883"
config['userID'] = "user1id"
        config['password'] = "user1passwrd"
# create the connector
mqstream = MqttStreams(topo, config)
# publish a python source stream to the topic "python.topic1"
topic = "python.topic1"
src = topo.source(test_functions.mqtt_publish)
mqs = mqstream.publish(src, topic)
# subscribe to the topic "python.topic1"
topic = ["python.topic1", ]
mqs = mqstream.subscribe(topic)
mqs.print()
Configuration properties apply to publish and
subscribe unless stated otherwise.
serverURI
Required String. URI to the MQTT server, either
        tcp://<hostid>[:<port>]
        or ssl://<hostid>[:<port>].
The port defaults to 1883 for "tcp:" and 8883 for "ssl:" URIs.
clientID
Optional String. A unique identifier for a connection
to the MQTT server.
        The MQTT broker only allows a single
        connection for a particular clientID.
By default a unique client ID is automatically
generated for each use of publish() and subscribe().
The specified clientID is used for the first
publish() or subscribe() use and
        a suffix is added for each subsequent use.
keepAliveInterval
        Optional Integer. Automatically generate an MQTT
        ping message to the server if a message or ping hasn't been
        sent or received in the last keepAliveInterval seconds.
Enables the client to detect if the server is no longer available
without having to wait for the TCP/IP timeout.
A value of 0 disables keepalive processing.
The default is 60.
commandTimeout
Optional Long. The maximum time in milliseconds
to wait for a MQTT connect or publish action to complete.
A value of 0 causes the client to wait indefinitely.
The default is 0.
period
Optional Long. The time in milliseconds before
attempting to reconnect to the server following a connection failure.
The default is 60000.
userID
Optional String. The identifier to use when authenticating
with a server configured to require that form of authentication.
password
Optional String. The identifier to use when authenticating
with server configured to require that form of authentication.
trustStore
Optional String. The pathname to a file containing the
public certificate of trusted MQTT servers. If a relative path
is specified, the path is relative to the application directory.
Required when connecting to a MQTT server with an
ssl:/... serverURI.
trustStorePassword
Required String when trustStore is used.
The password needed to access the encrypted trustStore file.
keyStore
Optional String. The pathname to a file containing the
MQTT client's public private key certificates.
If a relative path is specified, the path is relative to the
application directory.
Required when an MQTT server is configured to use SSL client authentication.
keyStorePassword
Required String when keyStore is used.
The password needed to access the encrypted keyStore file.
messageQueueSize
[subscribe] Optional Integer. The size, in number
of messages, of the subscriber's internal receive buffer. Received
messages are added to the buffer prior to being converted to a
stream tuple. The receiver blocks when the buffer is full.
The default is 50.
retain
[publish] Optional Boolean. Indicates if messages should be
retained on the MQTT server. Default is false.
qos
Optional Integer. The default
MQTT quality of service used for message handling.
The default is 0.
"""
def __init__(self, topology, config):
self.topology = topology
self.config = config.copy()
self.opCnt = 0
def publish(self, pub_stream, topic):
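        """Publish each tuple of pub_stream, converted to an SPL rstring,
        to the given MQTT topic via the MQTTSink operator. Returns None."""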
parms = self.config.copy()
parms['topic'] = topic
parms['dataAttributeName'] = "string"
if (++self.opCnt > 1):
# each op requires its own clientID
clientId = parms['clientID']
if (clientId is not None and len(clientId) > 0):
parms['clientID'] = clientId + "-" + str(id(self)) + "-" + str(self.opCnt)
# convert pub_stream outputport schema from spl po to spl rstring type
forOp = pub_stream._map(streamsx.topology.functions.identity, schema.CommonSchema.String)
op = self.topology.graph.addOperator(kind="com.ibm.streamsx.messaging.mqtt::MQTTSink")
op.addInputPort(outputPort=forOp.oport)
op.setParameters(parms)
return None
def subscribe(self, topic):
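        """Subscribe to the given MQTT topic(s) via the MQTTSource operator
        and return a Stream of the message payloads as strings."""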
parms = self.config.copy()
if (parms['retain'] is not None):
del parms['retain']
parms['topics'] = topic
parms['topicOutAttrName'] = "topic"
parms['dataAttributeName'] = "string"
if (++self.opCnt > 1):
# each op requires its own clientID
clientId = parms['clientID']
if (clientId is not None and len(clientId) > 0):
parms['clientID'] = clientId + "-" + str(id(self)) + "-" + str(self.opCnt)
op = self.topology.graph.addOperator(kind="com.ibm.streamsx.messaging.mqtt::MQTTSource")
oport = op.addOutputPort(schema=schema.StreamSchema("tuple<rstring topic, rstring string>"))
op.setParameters(parms)
pop = self.topology.graph.addPassThruOperator()
pop.addInputPort(outputPort=oport)
pOport = pop.addOutputPort(schema=schema.CommonSchema.String)
return Stream(self.topology, pOport)<|fim▁end|> | |
<|file_name|>stage-details-test.js<|end_file_name|><|fim▁begin|>import { moduleForComponent, test } from 'ember-qunit';
import hbs from 'htmlbars-inline-precompile';
moduleForComponent('stage-details', 'Integration | Component | stage details', {<|fim▁hole|>});
test('it renders', function(assert) {
assert.expect(2);
// Set any properties with this.set('myProperty', 'value');
// Handle any actions with this.on('myAction', function(val) { ... });
this.render(hbs`{{stage-details}}`);
assert.equal(this.$().text().trim(), '');
// Template block usage:
this.render(hbs`
{{#stage-details}}
template block text
{{/stage-details}}
`);
assert.equal(this.$().text().trim(), 'template block text');
});<|fim▁end|> | integration: true |
<|file_name|>jaccard_similarity.py<|end_file_name|><|fim▁begin|>"""
The Jaccard similarity coefficient is a commonly used indicator of the
similarity between two sets. Let U be a set and A and B be subsets of U,
then the Jaccard index/similarity is defined to be the ratio of the number
of elements of their intersection and the number of elements of their union.
Inspired from Wikipedia and<|fim▁hole|>
https://en.wikipedia.org/wiki/Jaccard_index
https://mmds.org
Jaccard similarity is widely used with MinHashing.
"""
def jaccard_similariy(setA, setB, alternativeUnion=False):
"""
    Finds the Jaccard similarity between two sets.
    Essentially, it's intersection over union.
    The alternative way to calculate this is to take the union as the sum of the
    number of items in the two sets. This will lead to the Jaccard similarity
    of a set with itself being 1/2 instead of 1. [MMDS 2nd Edition, Page 77]
Parameters:
:setA (set,list,tuple): A non-empty set/list
:setB (set,list,tuple): A non-empty set/list
:alternativeUnion (boolean): If True, use sum of number of
items as union
Output:
(float) The jaccard similarity between the two sets.
Examples:
>>> setA = {'a', 'b', 'c', 'd', 'e'}
>>> setB = {'c', 'd', 'e', 'f', 'h', 'i'}
>>> jaccard_similariy(setA,setB)
0.375
>>> jaccard_similariy(setA,setA)
1.0
>>> jaccard_similariy(setA,setA,True)
0.5
>>> setA = ['a', 'b', 'c', 'd', 'e']
>>> setB = ('c', 'd', 'e', 'f', 'h', 'i')
>>> jaccard_similariy(setA,setB)
0.375
"""
if isinstance(setA, set) and isinstance(setB, set):
intersection = len(setA.intersection(setB))
if alternativeUnion:
union = len(setA) + len(setB)
else:
union = len(setA.union(setB))
return intersection / union
if isinstance(setA, (list, tuple)) and isinstance(setB, (list, tuple)):
intersection = [element for element in setA if element in setB]
if alternativeUnion:
union = len(setA) + len(setB)
else:
union = setA + [element for element in setB if element not in setA]
return len(intersection) / len(union)
if __name__ == "__main__":
setA = {"a", "b", "c", "d", "e"}
setB = {"c", "d", "e", "f", "h", "i"}
print(jaccard_similariy(setA, setB))<|fim▁end|> | the book Mining of Massive Datasets [MMDS 2nd Edition, Chapter 3] |
<|file_name|>popup.js<|end_file_name|><|fim▁begin|>setTimeout(function() {
$('.color-box').colpick({
layout:'hex',
submit:0,
colorScheme:'dark',
onChange:function(hsb,hex,rgb,el,bySetColor) {
$(el).css('background','#'+hex);
// Fill the text box just if the color was set using the picker, and not the colpickSetColor function.
if(!bySetColor) $(el).val(hex);
}
});
$("#batch-input").click(function () {
$("#batch").fadeIn("fast");
$(".popup-background").fadeIn("fast");
});
$(".close-popup").click(function () {
$(".popup").fadeOut("fast");
$(".popup-background").fadeOut("fast");
});
$("a.debug").click(function () {
$("#debug").css("visibility", "visible");
});
$("a.save-version").click(function () {
$("#save").fadeIn("fast");
$(".popup-background").fadeIn("fast");
});
$("a.leave").click(function () {
$("#message").fadeIn("fast");
$(".popup-background").fadeIn("fast");
});
$("a.topline").click(function () {
$("#topline").fadeIn("fast");
$("#topline").delay( 5000 ).fadeOut("fast");<|fim▁hole|> $("#markdown").fadeIn("fast");
$(".popup-background").fadeIn("fast");
})
$("a.search-button").click(function () {
$("#search").css("visibility", "visible");
})
},1000);<|fim▁end|> | });
$("a.win-edit").click(function () { |
<|file_name|>rtl8139.rs<|end_file_name|><|fim▁begin|>use alloc::boxed::Box;
use arch::memory;
use collections::slice;
use collections::string::ToString;
use collections::vec::Vec;
use collections::vec_deque::VecDeque;
use core::ptr;
use common::debug;
use drivers::pci::config::PciConfig;
use drivers::io::{Io, Pio};
use network::common::*;
use network::scheme::*;
use fs::{KScheme, Resource, Url};
use syscall::Result;
use sync::Intex;
const RTL8139_TSR_OWN: u32 = 1 << 13;
const RTL8139_CR_RST: u8 = 1 << 4;
const RTL8139_CR_RE: u8 = 1 << 3;
const RTL8139_CR_TE: u8 = 1 << 2;
const RTL8139_CR_BUFE: u8 = 1 << 0;
const RTL8139_ISR_SERR: u16 = 1 << 15;
const RTL8139_ISR_TIMEOUT: u16 = 1 << 14;
const RTL8139_ISR_LENCHG: u16 = 1 << 13;
const RTL8139_ISR_FOVW: u16 = 1 << 6;
const RTL8139_ISR_PUN_LINKCHG: u16 = 1 << 5;
const RTL8139_ISR_RXOVW: u16 = 1 << 4;
const RTL8139_ISR_TER: u16 = 1 << 3;
const RTL8139_ISR_TOK: u16 = 1 << 2;
const RTL8139_ISR_RER: u16 = 1 << 1;
const RTL8139_ISR_ROK: u16 = 1 << 0;
const RTL8139_TCR_IFG: u32 = 0b11 << 24;
const RTL8139_RCR_WRAP: u32 = 1 << 7;
const RTL8139_RCR_AR: u32 = 1 << 4;
const RTL8139_RCR_AB: u32 = 1 << 3;
const RTL8139_RCR_AM: u32 = 1 << 2;
const RTL8139_RCR_APM: u32 = 1 << 1;
#[repr(packed)]
struct Txd {
pub address_port: Pio<u32>,
pub status_port: Pio<u32>,
pub buffer: usize,
}
pub struct Rtl8139Port {
pub idr: [Pio<u8>; 6],
pub rbstart: Pio<u32>,
pub cr: Pio<u8>,
pub capr: Pio<u16>,
pub cbr: Pio<u16>,
pub imr: Pio<u16>,
pub isr: Pio<u16>,
pub tcr: Pio<u32>,
pub rcr: Pio<u32>,
pub config1: Pio<u8>,
}
impl Rtl8139Port {
pub fn new(base: u16) -> Self {
return Rtl8139Port {
idr: [Pio::<u8>::new(base + 0x00),
Pio::<u8>::new(base + 0x01),
Pio::<u8>::new(base + 0x02),
Pio::<u8>::new(base + 0x03),
Pio::<u8>::new(base + 0x04),
Pio::<u8>::new(base + 0x05)],
rbstart: Pio::<u32>::new(base + 0x30),
cr: Pio::<u8>::new(base + 0x37),
capr: Pio::<u16>::new(base + 0x38),
cbr: Pio::<u16>::new(base + 0x3A),
imr: Pio::<u16>::new(base + 0x3C),
isr: Pio::<u16>::new(base + 0x3E),
tcr: Pio::<u32>::new(base + 0x40),
rcr: Pio::<u32>::new(base + 0x44),
config1: Pio::<u8>::new(base + 0x52),
};
}
}
pub struct Rtl8139 {
pci: PciConfig,
base: usize,
memory_mapped: bool,
irq: u8,
resources: Intex<Vec<*mut NetworkResource>>,
inbound: VecDeque<Vec<u8>>,
outbound: VecDeque<Vec<u8>>,
txds: Vec<Txd>,
txd_i: usize,
port: Rtl8139Port,
}
impl Rtl8139 {
pub fn new(mut pci: PciConfig) -> Box<Self> {
let pci_id = unsafe { pci.read(0x00) };
let revision = (unsafe { pci.read(0x08) } & 0xFF) as u8;
if pci_id == 0x813910EC && revision < 0x20 {
debugln!("Not an 8139C+ compatible chip")
}
let base = unsafe { pci.read(0x10) as usize };
let irq = unsafe { pci.read(0x3C) as u8 & 0xF };
let mut module = box Rtl8139 {
pci: pci,
base: base & 0xFFFFFFF0,
memory_mapped: base & 1 == 0,
irq: irq,
resources: Intex::new(Vec::new()),
inbound: VecDeque::new(),<|fim▁hole|> port: Rtl8139Port::new((base & 0xFFFFFFF0) as u16),
};
unsafe { module.init() };
module
}
unsafe fn init(&mut self) {
debug::d("RTL8139 on: ");
debug::dh(self.base);
if self.memory_mapped {
debug::d(" memory mapped");
} else {
debug::d(" port mapped");
}
debug::d(" IRQ: ");
debug::dbh(self.irq);
self.pci.flag(4, 4, true); // Bus mastering
let base = self.base as u16;
self.port.config1.write(0);
self.port.cr.write(RTL8139_CR_RST);
while self.port.cr.read() & RTL8139_CR_RST != 0 {}
debug::d(" MAC: ");
MAC_ADDR = MacAddr {
bytes: [self.port.idr[0].read(),
self.port.idr[1].read(),
self.port.idr[2].read(),
self.port.idr[3].read(),
self.port.idr[4].read(),
self.port.idr[5].read()],
};
debug::d(&MAC_ADDR.to_string());
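        // 10240-byte receive buffer: the 8 KiB ring plus slack so a frame can
        // run past the ring end when WRAP receive mode is enabled.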
let receive_buffer = memory::alloc(10240);
self.port.rbstart.write(receive_buffer as u32);
for i in 0..4 {
self.txds.push(Txd {
address_port: Pio::<u32>::new(base + 0x20 + (i as u16) * 4),
status_port: Pio::<u32>::new(base + 0x10 + (i as u16) * 4),
buffer: memory::alloc(4096),
});
}
self.port.imr.write(RTL8139_ISR_TOK | RTL8139_ISR_ROK);
debug::d(" IMR: ");
debug::dh(self.port.imr.read() as usize);
self.port.cr.write(RTL8139_CR_RE | RTL8139_CR_TE);
debug::d(" CMD: ");
debug::dbh(self.port.cr.read());
self.port.rcr.write(RTL8139_RCR_WRAP | RTL8139_RCR_AR | RTL8139_RCR_AB | RTL8139_RCR_AM |
RTL8139_RCR_APM);
debug::d(" RCR: ");
debug::dh(self.port.rcr.read() as usize);
self.port.tcr.writef(RTL8139_TCR_IFG, true);
debug::d(" TCR: ");
debug::dh(self.port.tcr.read() as usize);
debug::dl();
}
unsafe fn receive_inbound(&mut self) {
let receive_buffer = self.port.rbstart.read() as usize;
let mut capr = (self.port.capr.read() + 16) as usize;
let cbr = self.port.cbr.read() as usize;
while capr != cbr {
let frame_addr = receive_buffer + capr + 4;
let frame_status = ptr::read((receive_buffer + capr) as *const u16) as usize;
let frame_len = ptr::read((receive_buffer + capr + 2) as *const u16) as usize;
debug::d("Recv ");
debug::dh(capr as usize);
debug::d(" ");
debug::dh(frame_status);
debug::d(" ");
debug::dh(frame_addr);
debug::d(" ");
debug::dh(frame_len);
debug::dl();
self.inbound.push_back(Vec::from(slice::from_raw_parts(frame_addr as *const u8, frame_len - 4)));
capr = capr + frame_len + 4;
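            // Align CAPR to a 4-byte boundary and wrap it within the 8 KiB receive ring.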
capr = (capr + 3) & (0xFFFFFFFF - 3);
if capr >= 8192 {
capr -= 8192
}
self.port.capr.write((capr as u16) - 16);
}
}
unsafe fn send_outbound(&mut self) {
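        // Drain queued frames into the four transmit descriptors, waiting for the
        // NIC to release (OWN bit) each descriptor before reusing it.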
while let Some(bytes) = self.outbound.pop_front() {
if let Some(ref mut txd) = self.txds.get_mut(self.txd_i) {
if bytes.len() < 4096 {
while !txd.status_port.readf(RTL8139_TSR_OWN) {}
debug::d("Send ");
debug::dh(self.txd_i as usize);
debug::d(" ");
debug::dh(txd.status_port.read() as usize);
debug::d(" ");
debug::dh(txd.buffer);
debug::d(" ");
debug::dh(bytes.len() & 0xFFF);
debug::dl();
::memcpy(txd.buffer as *mut u8, bytes.as_ptr(), bytes.len());
txd.address_port.write(txd.buffer as u32);
txd.status_port.write(bytes.len() as u32 & 0xFFF);
self.txd_i = (self.txd_i + 1) % 4;
} else {
debug::dl();
debug::d("RTL8139: Frame too long for transmit: ");
debug::dd(bytes.len());
debug::dl();
}
} else {
debug::d("RTL8139: TXD Overflow!\n");
self.txd_i = 0;
}
}
}
}
impl KScheme for Rtl8139 {
fn scheme(&self) -> &str {
"network"
}
fn open(&mut self, _: Url, _: usize) -> Result<Box<Resource>> {
Ok(NetworkResource::new(self))
}
fn on_irq(&mut self, irq: u8) {
if irq == self.irq {
let isr = self.port.isr.read();
self.port.isr.write(isr);
// dh(isr as usize);
// dl();
self.sync();
}
}
}
impl NetworkScheme for Rtl8139 {
fn add(&mut self, resource: *mut NetworkResource) {
self.resources.lock().push(resource);
}
fn remove(&mut self, resource: *mut NetworkResource) {
let mut resources = self.resources.lock();
let mut i = 0;
while i < resources.len() {
let mut remove = false;
match resources.get(i) {
Some(ptr) => if *ptr == resource {
remove = true;
} else {
i += 1;
},
None => break,
}
if remove {
resources.remove(i);
}
}
}
fn sync(&mut self) {
unsafe {
{
let resources = self.resources.lock();
for resource in resources.iter() {
while let Some(bytes) = (**resource).outbound.lock().pop_front() {
self.outbound.push_back(bytes);
}
}
}
self.send_outbound();
self.receive_inbound();
{
let resources = self.resources.lock();
while let Some(bytes) = self.inbound.pop_front() {
for resource in resources.iter() {
(**resource).inbound.lock().push_back(bytes.clone());
}
}
}
}
}
}<|fim▁end|> | outbound: VecDeque::new(),
txds: Vec::new(),
txd_i: 0, |
<|file_name|>InputErrorSuffix.js<|end_file_name|><|fim▁begin|>import React from 'react';
import PropTypes from 'prop-types';
import Tooltip from '../Tooltip';
import SvgExclamation from '../svg/Exclamation.js';
import styles from './Input.scss';
class InputErrorSuffix extends React.Component {
render() {
return (
<Tooltip<|fim▁hole|> placement="top"
alignment="center"
textAlign="left"
content={this.props.errorMessage}
overlay=""
theme="dark"
maxWidth="230px"
hideDelay={150}
>
<div className={styles.exclamation}><SvgExclamation width={2} height={11}/></div>
</Tooltip>
);
}
}
InputErrorSuffix.propTypes = {
theme: PropTypes.oneOf(['normal', 'paneltitle', 'material', 'amaterial']),
errorMessage: PropTypes.string.isRequired,
focused: PropTypes.bool
};
export default InputErrorSuffix;<|fim▁end|> | dataHook="input-tooltip"
disabled={this.props.errorMessage.length === 0} |
<|file_name|>NatsComponentConfigurer.java<|end_file_name|><|fim▁begin|>/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.nats;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.support.component.PropertyConfigurerSupport;
/**
* Generated by camel build tools - do NOT edit this file!
*/
@SuppressWarnings("unchecked")
public class NatsComponentConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("servers", java.lang.String.class);
map.put("verbose", boolean.class);
map.put("bridgeErrorHandler", boolean.class);
map.put("lazyStartProducer", boolean.class);
map.put("basicPropertyBinding", boolean.class);
map.put("useGlobalSslContextParameters", boolean.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
NatsComponent target = (NatsComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "basicpropertybinding":
case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "servers": target.setServers(property(camelContext, java.lang.String.class, value)); return true;
case "useglobalsslcontextparameters":
case "useGlobalSslContextParameters": target.setUseGlobalSslContextParameters(property(camelContext, boolean.class, value)); return true;
case "verbose": target.setVerbose(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
NatsComponent target = (NatsComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "basicpropertybinding":
case "basicPropertyBinding": return target.isBasicPropertyBinding();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "lazystartproducer":<|fim▁hole|> case "verbose": return target.isVerbose();
default: return null;
}
}
}<|fim▁end|> | case "lazyStartProducer": return target.isLazyStartProducer();
case "servers": return target.getServers();
case "useglobalsslcontextparameters":
case "useGlobalSslContextParameters": return target.isUseGlobalSslContextParameters(); |
<|file_name|>http_loader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::fetch;
use crate::fetch_with_context;
use crate::make_server;
use crate::new_fetch_context;
use cookie_rs::Cookie as CookiePair;
use crossbeam_channel::{unbounded, Receiver};
use devtools_traits::HttpRequest as DevtoolsHttpRequest;
use devtools_traits::HttpResponse as DevtoolsHttpResponse;
use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg, NetworkEvent};
use flate2::write::{DeflateEncoder, GzEncoder};
use flate2::Compression;
use futures::{self, Future, Stream};
use headers::authorization::Basic;
use headers::{
AccessControlAllowOrigin, Authorization, ContentLength, Date, HeaderMapExt, Host, Origin,
StrictTransportSecurity, UserAgent,
};
use http::header::{self, HeaderMap, HeaderValue};
use http::uri::Authority;
use http::{Method, StatusCode};
use hyper::body::Body;
use hyper::{Request as HyperRequest, Response as HyperResponse};
use msg::constellation_msg::TEST_PIPELINE_ID;
use net::cookie::Cookie;
use net::cookie_storage::CookieStorage;
use net::http_loader::determine_request_referrer;
use net::resource_thread::AuthCacheEntry;
use net::test::replace_host_table;
use net_traits::request::{CredentialsMode, Destination, RequestBuilder, RequestMode};
use net_traits::response::ResponseBody;
use net_traits::{CookieSource, NetworkError, ReferrerPolicy};
use servo_url::{ImmutableOrigin, ServoUrl};
use std::collections::HashMap;
use std::io::Write;
use std::str;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex, RwLock};
use std::time::Duration;
fn mock_origin() -> ImmutableOrigin {
ServoUrl::parse("http://servo.org").unwrap().origin()
}
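// Test helper: drains the request body and decodes it as UTF-8.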
fn read_response(req: HyperRequest<Body>) -> impl Future<Item = String, Error = ()> {
req.into_body()
.concat2()
.and_then(|body| futures::future::ok(str::from_utf8(&body).unwrap().to_owned()))
.map_err(|_| ())
}
fn assert_cookie_for_domain(
cookie_jar: &RwLock<CookieStorage>,
domain: &str,
cookie: Option<&str>,
) {
let mut cookie_jar = cookie_jar.write().unwrap();
let url = ServoUrl::parse(&*domain).unwrap();
let cookies = cookie_jar.cookies_for_url(&url, CookieSource::HTTP);
assert_eq!(cookies.as_ref().map(|c| &**c), cookie);
}
pub fn expect_devtools_http_request(
devtools_port: &Receiver<DevtoolsControlMsg>,
) -> DevtoolsHttpRequest {
match devtools_port.recv().unwrap() {
DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::NetworkEvent(_, net_event)) => {
match net_event {
NetworkEvent::HttpRequest(httprequest) => httprequest,
_ => panic!("No HttpRequest Received"),
}
},
_ => panic!("No HttpRequest Received"),
}
}
pub fn expect_devtools_http_response(
devtools_port: &Receiver<DevtoolsControlMsg>,
) -> DevtoolsHttpResponse {
match devtools_port.recv().unwrap() {
DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::NetworkEvent(
_,
net_event_response,
)) => match net_event_response {
NetworkEvent::HttpResponse(httpresponse) => httpresponse,
_ => panic!("No HttpResponse Received"),
},
_ => panic!("No HttpResponse Received"),
}
}
#[test]
fn test_check_default_headers_loaded_in_every_request() {
let expected_headers = Arc::new(Mutex::new(None));
let expected_headers_clone = expected_headers.clone();
let handler = move |request: HyperRequest<Body>, _: &mut HyperResponse<Body>| {
assert_eq!(
request.headers().clone(),
expected_headers_clone.lock().unwrap().take().unwrap()
);
};
let (server, url) = make_server(handler);
let mut headers = HeaderMap::new();
headers.insert(
header::ACCEPT_ENCODING,
HeaderValue::from_static("gzip, deflate, br"),
);
headers.typed_insert(Host::from(
format!("{}:{}", url.host_str().unwrap(), url.port().unwrap())
.parse::<Authority>()
.unwrap(),
));
headers.insert(
header::ACCEPT,
HeaderValue::from_static(
"text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8",
),
);
headers.insert(
header::ACCEPT_LANGUAGE,
HeaderValue::from_static("en-US, en; q=0.5"),
);
headers.typed_insert::<UserAgent>(crate::DEFAULT_USER_AGENT.parse().unwrap());
*expected_headers.lock().unwrap() = Some(headers.clone());
    // GET request: the default headers above should match exactly.
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.destination(Destination::Document)
.origin(url.clone().origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
    // POST request: same defaults plus Content-Length and Origin.
let mut post_headers = headers.clone();
post_headers.typed_insert(ContentLength(0 as u64));
let url_str = url.as_str();
// request gets header "Origin: http://example.com" but expected_headers has
// "Origin: http://example.com/" which do not match for equality so strip trailing '/'
post_headers.insert(
header::ORIGIN,
HeaderValue::from_str(&url_str[..url_str.len() - 1]).unwrap(),
);
*expected_headers.lock().unwrap() = Some(post_headers);
let mut request = RequestBuilder::new(url.clone())
.method(Method::POST)
.destination(Destination::Document)
.origin(url.clone().origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
let _ = server.close();
}
#[test]
fn test_load_when_request_is_not_get_or_head_and_there_is_no_body_content_length_should_be_set_to_0(
) {
let handler = move |request: HyperRequest<Body>, _: &mut HyperResponse<Body>| {
assert_eq!(
request.headers().typed_get::<ContentLength>(),
Some(ContentLength(0))
);
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone())
.method(Method::POST)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
let _ = server.close();
}
#[test]
fn test_request_and_response_data_with_network_messages() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response
.headers_mut()
.typed_insert(Host::from("foo.bar".parse::<Authority>().unwrap()));
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut request_headers = HeaderMap::new();
request_headers.typed_insert(Host::from("bar.foo".parse::<Authority>().unwrap()));
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.headers(request_headers)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let (devtools_chan, devtools_port) = unbounded();
let response = fetch(&mut request, Some(devtools_chan));
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
let _ = server.close();
// notification received from devtools
let devhttprequest = expect_devtools_http_request(&devtools_port);
let devhttpresponse = expect_devtools_http_response(&devtools_port);
//Creating default headers for request
let mut headers = HeaderMap::new();
headers.insert(
header::ACCEPT_ENCODING,
HeaderValue::from_static("gzip, deflate, br"),
);
headers.insert(
header::ACCEPT,
HeaderValue::from_static(
"text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8",
),
);
headers.insert(
header::ACCEPT_LANGUAGE,
HeaderValue::from_static("en-US, en; q=0.5"),
);
headers.typed_insert::<UserAgent>(crate::DEFAULT_USER_AGENT.parse().unwrap());
let httprequest = DevtoolsHttpRequest {
url: url,
method: Method::GET,
headers: headers,
body: Some(b"".to_vec()),
pipeline_id: TEST_PIPELINE_ID,
startedDateTime: devhttprequest.startedDateTime,
timeStamp: devhttprequest.timeStamp,
connect_time: devhttprequest.connect_time,
send_time: devhttprequest.send_time,
is_xhr: false,
};
let content = "Yay!";
let mut response_headers = HeaderMap::new();
response_headers.typed_insert(ContentLength(content.len() as u64));
response_headers.typed_insert(Host::from("foo.bar".parse::<Authority>().unwrap()));
response_headers.typed_insert(
devhttpresponse
.headers
.as_ref()
.unwrap()
.typed_get::<Date>()
.unwrap()
.clone(),
);
let httpresponse = DevtoolsHttpResponse {
headers: Some(response_headers),
status: Some((200, b"OK".to_vec())),
body: None,
pipeline_id: TEST_PIPELINE_ID,
};
assert_eq!(devhttprequest, httprequest);
assert_eq!(devhttpresponse, httpresponse);
}
#[test]
fn test_request_and_response_message_from_devtool_without_pipeline_id() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response
.headers_mut()
.typed_insert(Host::from("foo.bar".parse::<Authority>().unwrap()));
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(None)
.build();
let (devtools_chan, devtools_port) = unbounded();
let response = fetch(&mut request, Some(devtools_chan));
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
let _ = server.close();
    // no devtools notification should have been sent (request had no pipeline id)
assert!(devtools_port.try_recv().is_err());
}
#[test]
fn test_redirected_request_to_devtools() {
let post_handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(request.method(), Method::GET);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (post_server, post_url) = make_server(post_handler);
let post_redirect_url = post_url.clone();
let pre_handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(request.method(), Method::POST);
response.headers_mut().insert(
header::LOCATION,
HeaderValue::from_str(&post_redirect_url.to_string()).unwrap(),
);
*response.status_mut() = StatusCode::MOVED_PERMANENTLY;
};
let (pre_server, pre_url) = make_server(pre_handler);
let mut request = RequestBuilder::new(pre_url.clone())
.method(Method::POST)
.destination(Destination::Document)
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let (devtools_chan, devtools_port) = unbounded();
fetch(&mut request, Some(devtools_chan));
let _ = pre_server.close();
let _ = post_server.close();
let devhttprequest = expect_devtools_http_request(&devtools_port);
let devhttpresponse = expect_devtools_http_response(&devtools_port);
assert_eq!(devhttprequest.method, Method::POST);
assert_eq!(devhttprequest.url, pre_url);
assert_eq!(
devhttpresponse.status,
Some((301, b"Moved Permanently".to_vec()))
);
let devhttprequest = expect_devtools_http_request(&devtools_port);
let devhttpresponse = expect_devtools_http_response(&devtools_port);
assert_eq!(devhttprequest.method, Method::GET);
assert_eq!(devhttprequest.url, post_url);
assert_eq!(devhttpresponse.status, Some((200, b"OK".to_vec())));
}
#[test]
fn test_load_when_redirecting_from_a_post_should_rewrite_next_request_as_get() {
let post_handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(request.method(), Method::GET);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (post_server, post_url) = make_server(post_handler);
let post_redirect_url = post_url.clone();
let pre_handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(request.method(), Method::POST);
response.headers_mut().insert(
header::LOCATION,
HeaderValue::from_str(&post_redirect_url.to_string()).unwrap(),
);
*response.status_mut() = StatusCode::MOVED_PERMANENTLY;
};
let (pre_server, pre_url) = make_server(pre_handler);
let mut request = RequestBuilder::new(pre_url.clone())
.method(Method::POST)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
let _ = pre_server.close();
let _ = post_server.close();
assert!(response.to_actual().status.unwrap().0.is_success());
}
#[test]
fn test_load_should_decode_the_response_as_deflate_when_response_headers_have_content_encoding_deflate(
) {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response.headers_mut().insert(
header::CONTENT_ENCODING,
HeaderValue::from_static("deflate"),
);
let mut e = DeflateEncoder::new(Vec::new(), Compression::default());
        e.write_all(b"Yay!").unwrap();
let encoded_content = e.finish().unwrap();
*response.body_mut() = encoded_content.into();
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
let _ = server.close();
let internal_response = response.internal_response.unwrap();
assert!(internal_response.status.clone().unwrap().0.is_success());
assert_eq!(
*internal_response.body.lock().unwrap(),
ResponseBody::Done(b"Yay!".to_vec())
);
}
#[test]
fn test_load_should_decode_the_response_as_gzip_when_response_headers_have_content_encoding_gzip() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response
.headers_mut()
.insert(header::CONTENT_ENCODING, HeaderValue::from_static("gzip"));
let mut e = GzEncoder::new(Vec::new(), Compression::default());
        e.write_all(b"Yay!").unwrap();
let encoded_content = e.finish().unwrap();
*response.body_mut() = encoded_content.into();
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
        .pipeline_id(Some(TEST_PIPELINE_ID))
        .build();
let response = fetch(&mut request, None);
let _ = server.close();
let internal_response = response.internal_response.unwrap();
assert!(internal_response.status.clone().unwrap().0.is_success());
assert_eq!(
*internal_response.body.lock().unwrap(),
ResponseBody::Done(b"Yay!".to_vec())
);
}
#[test]
fn test_load_doesnt_send_request_body_on_any_redirect() {
let post_handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(request.method(), Method::GET);
read_response(request)
.and_then(|data| {
assert_eq!(data, "");
futures::future::ok(())
})
.poll()
.unwrap();
*response.body_mut() = b"Yay!".to_vec().into();
};
let (post_server, post_url) = make_server(post_handler);
let post_redirect_url = post_url.clone();
let pre_handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
read_response(request)
.and_then(|data| {
assert_eq!(data, "Body on POST");
futures::future::ok(())
})
.poll()
.unwrap();
response.headers_mut().insert(
header::LOCATION,
HeaderValue::from_str(&post_redirect_url.to_string()).unwrap(),
);
*response.status_mut() = StatusCode::MOVED_PERMANENTLY;
};
let (pre_server, pre_url) = make_server(pre_handler);
let mut request = RequestBuilder::new(pre_url.clone())
.body(Some(b"Body on POST!".to_vec()))
.method(Method::POST)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
let _ = pre_server.close();
let _ = post_server.close();
assert!(response.to_actual().status.unwrap().0.is_success());
}
#[test]
fn test_load_doesnt_add_host_to_hsts_list_when_url_is_http_even_if_hsts_headers_are_present() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response
.headers_mut()
.typed_insert(StrictTransportSecurity::excluding_subdomains(
Duration::from_secs(31536000),
));
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let mut context = new_fetch_context(None, None, None);
let response = fetch_with_context(&mut request, &mut context);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
assert_eq!(
context
.state
.hsts_list
.read()
.unwrap()
.is_host_secure(url.host_str().unwrap()),
false
);
}
#[test]
fn test_load_sets_cookies_in_the_resource_manager_when_it_get_set_cookie_header_in_response() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response.headers_mut().insert(
header::SET_COOKIE,
HeaderValue::from_static("mozillaIs=theBest"),
);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut context = new_fetch_context(None, None, None);
assert_cookie_for_domain(&context.state.cookie_jar, url.as_str(), None);
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.credentials_mode(CredentialsMode::Include)
.build();
let response = fetch_with_context(&mut request, &mut context);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
assert_cookie_for_domain(
&context.state.cookie_jar,
url.as_str(),
Some("mozillaIs=theBest"),
);
}
#[test]
fn test_load_sets_requests_cookies_header_for_url_by_getting_cookies_from_the_resource_manager() {
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(
request.headers().get(header::COOKIE).unwrap().as_bytes(),
b"mozillaIs=theBest"
);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut context = new_fetch_context(None, None, None);
{
let mut cookie_jar = context.state.cookie_jar.write().unwrap();
let cookie = Cookie::new_wrapped(
CookiePair::new("mozillaIs".to_owned(), "theBest".to_owned()),
&url,
CookieSource::HTTP,
)
.unwrap();
cookie_jar.push(cookie, &url, CookieSource::HTTP);
}
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.credentials_mode(CredentialsMode::Include)
.build();
let response = fetch_with_context(&mut request, &mut context);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
}
#[test]
fn test_load_sends_cookie_if_nonhttp() {
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(
request.headers().get(header::COOKIE).unwrap().as_bytes(),
b"mozillaIs=theBest"
);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut context = new_fetch_context(None, None, None);
{
let mut cookie_jar = context.state.cookie_jar.write().unwrap();
let cookie = Cookie::new_wrapped(
CookiePair::new("mozillaIs".to_owned(), "theBest".to_owned()),
&url,
CookieSource::NonHTTP,
)
.unwrap();
cookie_jar.push(cookie, &url, CookieSource::HTTP);
}
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.credentials_mode(CredentialsMode::Include)
.build();
let response = fetch_with_context(&mut request, &mut context);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
}
#[test]
fn test_cookie_set_with_httponly_should_not_be_available_using_getcookiesforurl() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response.headers_mut().insert(
header::SET_COOKIE,
HeaderValue::from_static("mozillaIs=theBest; HttpOnly"),
);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut context = new_fetch_context(None, None, None);
assert_cookie_for_domain(&context.state.cookie_jar, url.as_str(), None);
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.credentials_mode(CredentialsMode::Include)
.build();
let response = fetch_with_context(&mut request, &mut context);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
assert_cookie_for_domain(
&context.state.cookie_jar,
url.as_str(),
Some("mozillaIs=theBest"),
);
let mut cookie_jar = context.state.cookie_jar.write().unwrap();
assert!(cookie_jar
.cookies_for_url(&url, CookieSource::NonHTTP)
.is_none());
}
#[test]
fn test_when_cookie_received_marked_secure_is_ignored_for_http() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response.headers_mut().insert(
header::SET_COOKIE,
HeaderValue::from_static("mozillaIs=theBest; Secure"),
);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut context = new_fetch_context(None, None, None);
assert_cookie_for_domain(&context.state.cookie_jar, url.as_str(), None);
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.credentials_mode(CredentialsMode::Include)
.build();
let response = fetch_with_context(&mut request, &mut context);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
assert_cookie_for_domain(&context.state.cookie_jar, url.as_str(), None);
}
#[test]
fn test_load_sets_content_length_to_length_of_request_body() {
let content = b"This is a request body";
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
let content_length = ContentLength(content.len() as u64);
assert_eq!(
request.headers().typed_get::<ContentLength>(),
Some(content_length)
);
*response.body_mut() = content.to_vec().into();
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone())
.method(Method::POST)
.body(Some(content.to_vec()))
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
}
#[test]
fn test_load_uses_explicit_accept_from_headers_in_load_data() {
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(
request
.headers()
.get(header::ACCEPT)
.unwrap()
.to_str()
.unwrap(),
"text/html"
);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut accept_headers = HeaderMap::new();
accept_headers.insert(header::ACCEPT, HeaderValue::from_static("text/html"));
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.headers(accept_headers)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
}
#[test]
fn test_load_sets_default_accept_to_html_xhtml_xml_and_then_anything_else() {
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(
request
.headers()
.get(header::ACCEPT)
.unwrap()
.to_str()
.unwrap(),
"text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8"
);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
}
#[test]
fn test_load_uses_explicit_accept_encoding_from_load_data_headers() {
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(
request
.headers()
.get(header::ACCEPT_ENCODING)
.unwrap()
.to_str()
.unwrap(),
"chunked"
);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut accept_encoding_headers = HeaderMap::new();
accept_encoding_headers.insert(header::ACCEPT_ENCODING, HeaderValue::from_static("chunked"));
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.headers(accept_encoding_headers)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
}
#[test]
fn test_load_sets_default_accept_encoding_to_gzip_and_deflate() {
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(
request
.headers()
.get(header::ACCEPT_ENCODING)
.unwrap()
.to_str()
.unwrap(),
"gzip, deflate, br"
);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
}
#[test]
fn test_load_errors_when_there_a_redirect_loop() {
let url_b_for_a = Arc::new(Mutex::new(None::<ServoUrl>));
let url_b_for_a_clone = url_b_for_a.clone();
let handler_a = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response.headers_mut().insert(
header::LOCATION,
HeaderValue::from_str(
&url_b_for_a_clone
.lock()
.unwrap()
.as_ref()
.unwrap()
.to_string(),
)
.unwrap(),
);
*response.status_mut() = StatusCode::MOVED_PERMANENTLY;
};
let (server_a, url_a) = make_server(handler_a);
let url_a_for_b = url_a.clone();
let handler_b = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response.headers_mut().insert(
header::LOCATION,
HeaderValue::from_str(&url_a_for_b.to_string()).unwrap(),
);
*response.status_mut() = StatusCode::MOVED_PERMANENTLY;
};
let (server_b, url_b) = make_server(handler_b);
*url_b_for_a.lock().unwrap() = Some(url_b.clone());
let mut request = RequestBuilder::new(url_a.clone())
.method(Method::GET)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
let _ = server_a.close();
let _ = server_b.close();
assert_eq!(
response.get_network_error(),
Some(&NetworkError::Internal("Too many redirects".to_owned()))
);
}
#[test]
fn test_load_succeeds_with_a_redirect_loop() {
let url_b_for_a = Arc::new(Mutex::new(None::<ServoUrl>));
let url_b_for_a_clone = url_b_for_a.clone();
let handled_a = AtomicBool::new(false);
let handler_a = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
if !handled_a.swap(true, Ordering::SeqCst) {
response.headers_mut().insert(
header::LOCATION,
HeaderValue::from_str(
&url_b_for_a_clone
.lock()
.unwrap()
.as_ref()
.unwrap()
.to_string(),
)
.unwrap(),
);
*response.status_mut() = StatusCode::MOVED_PERMANENTLY;
} else {
*response.body_mut() = b"Success".to_vec().into()
}
};
let (server_a, url_a) = make_server(handler_a);
let url_a_for_b = url_a.clone();
let handler_b = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
response.headers_mut().insert(
header::LOCATION,
HeaderValue::from_str(&url_a_for_b.to_string()).unwrap(),
);
*response.status_mut() = StatusCode::MOVED_PERMANENTLY;
};
let (server_b, url_b) = make_server(handler_b);
*url_b_for_a.lock().unwrap() = Some(url_b.clone());
let mut request = RequestBuilder::new(url_a.clone())
.method(Method::GET)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
let _ = server_a.close();
let _ = server_b.close();
let response = response.to_actual();
assert_eq!(response.url_list, [url_a.clone(), url_b, url_a]);
assert_eq!(
*response.body.lock().unwrap(),
ResponseBody::Done(b"Success".to_vec())
);
}
#[test]
fn test_load_follows_a_redirect() {
let post_handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(request.method(), Method::GET);
*response.body_mut() = b"Yay!".to_vec().into();
};
let (post_server, post_url) = make_server(post_handler);
let post_redirect_url = post_url.clone();
let pre_handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
assert_eq!(request.method(), Method::GET);
response.headers_mut().insert(
header::LOCATION,
HeaderValue::from_str(&post_redirect_url.to_string()).unwrap(),
);
*response.status_mut() = StatusCode::MOVED_PERMANENTLY;
};
let (pre_server, pre_url) = make_server(pre_handler);
let mut request = RequestBuilder::new(pre_url.clone())
.method(Method::GET)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.build();
let response = fetch(&mut request, None);
let _ = pre_server.close();
let _ = post_server.close();
let internal_response = response.internal_response.unwrap();
assert!(internal_response.status.clone().unwrap().0.is_success());
assert_eq!(
*internal_response.body.lock().unwrap(),
ResponseBody::Done(b"Yay!".to_vec())
);
}
#[test]
fn test_redirect_from_x_to_y_provides_y_cookies_from_y() {
let shared_url_y = Arc::new(Mutex::new(None::<ServoUrl>));
let shared_url_y_clone = shared_url_y.clone();
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
let path = request.uri().path();
if path == "/com/" {
assert_eq!(
request.headers().get(header::COOKIE).unwrap().as_bytes(),
b"mozillaIsNot=dotOrg"
);
let location = shared_url_y.lock().unwrap().as_ref().unwrap().to_string();
response.headers_mut().insert(
header::LOCATION,
HeaderValue::from_str(&location.to_string()).unwrap(),
);
*response.status_mut() = StatusCode::MOVED_PERMANENTLY;
} else if path == "/org/" {
assert_eq!(
request.headers().get(header::COOKIE).unwrap().as_bytes(),
b"mozillaIs=theBest"
);
*response.body_mut() = b"Yay!".to_vec().into();
} else {
panic!("unexpected path {:?}", path)
}
};
let (server, url) = make_server(handler);
let port = url.port().unwrap();
assert_eq!(url.host_str(), Some("localhost"));
let ip = "127.0.0.1".parse().unwrap();
let mut host_table = HashMap::new();
host_table.insert("mozilla.com".to_owned(), ip);
host_table.insert("mozilla.org".to_owned(), ip);
replace_host_table(host_table);
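    // Point mozilla.com and mozilla.org at the local test server so the
    // cross-domain redirect below stays in-process.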
let url_x = ServoUrl::parse(&format!("http://mozilla.com:{}/com/", port)).unwrap();
let url_y = ServoUrl::parse(&format!("http://mozilla.org:{}/org/", port)).unwrap();
*shared_url_y_clone.lock().unwrap() = Some(url_y.clone());
let mut context = new_fetch_context(None, None, None);
{
let mut cookie_jar = context.state.cookie_jar.write().unwrap();
let cookie_x = Cookie::new_wrapped(
CookiePair::new("mozillaIsNot".to_owned(), "dotOrg".to_owned()),
&url_x,
CookieSource::HTTP,
)
.unwrap();
cookie_jar.push(cookie_x, &url_x, CookieSource::HTTP);
let cookie_y = Cookie::new_wrapped(
CookiePair::new("mozillaIs".to_owned(), "theBest".to_owned()),
&url_y,
CookieSource::HTTP,
)
.unwrap();
cookie_jar.push(cookie_y, &url_y, CookieSource::HTTP);
}
let mut request = RequestBuilder::new(url_x.clone())
.method(Method::GET)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.credentials_mode(CredentialsMode::Include)
.build();
let response = fetch_with_context(&mut request, &mut context);
let _ = server.close();
let internal_response = response.internal_response.unwrap();
assert!(internal_response.status.clone().unwrap().0.is_success());
assert_eq!(
*internal_response.body.lock().unwrap(),
ResponseBody::Done(b"Yay!".to_vec())
);
}
#[test]
fn test_redirect_from_x_to_x_provides_x_with_cookie_from_first_response() {
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
let path = request.uri().path();
if path == "/initial/" {
response.headers_mut().insert(
header::SET_COOKIE,
HeaderValue::from_static("mozillaIs=theBest; path=/;"),
);
let location = "/subsequent/".to_string();
response.headers_mut().insert(
header::LOCATION,
HeaderValue::from_str(&location.to_string()).unwrap(),
);
*response.status_mut() = StatusCode::MOVED_PERMANENTLY;
} else if path == "/subsequent/" {
assert_eq!(
request.headers().get(header::COOKIE).unwrap().as_bytes(),
b"mozillaIs=theBest"
);
*response.body_mut() = b"Yay!".to_vec().into();
} else {
panic!("unexpected path {:?}", path)
}
};
let (server, url) = make_server(handler);
let url = url.join("/initial/").unwrap();
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.credentials_mode(CredentialsMode::Include)
.build();
let response = fetch(&mut request, None);
let _ = server.close();
let internal_response = response.internal_response.unwrap();
assert!(internal_response.status.clone().unwrap().0.is_success());
assert_eq!(
*internal_response.body.lock().unwrap(),
ResponseBody::Done(b"Yay!".to_vec())
);
}
#[test]
fn test_if_auth_creds_not_in_url_but_in_cache_it_sets_it() {
let handler = move |request: HyperRequest<Body>, _response: &mut HyperResponse<Body>| {
let expected = Authorization::basic("username", "test");
assert_eq!(
request.headers().typed_get::<Authorization<Basic>>(),
Some(expected)
);
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.credentials_mode(CredentialsMode::Include)
.build();
let mut context = new_fetch_context(None, None, None);
let auth_entry = AuthCacheEntry {
user_name: "username".to_owned(),
password: "test".to_owned(),
};
context
.state
.auth_cache
.write()
.unwrap()
.entries
.insert(url.origin().clone().ascii_serialization(), auth_entry);
let response = fetch_with_context(&mut request, &mut context);
let _ = server.close();
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
}
#[test]
fn test_auth_ui_needs_www_auth() {
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
*response.status_mut() = StatusCode::UNAUTHORIZED;
};
let (server, url) = make_server(handler);
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.body(None)
.destination(Destination::Document)
.origin(mock_origin())
.pipeline_id(Some(TEST_PIPELINE_ID))
.credentials_mode(CredentialsMode::Include)
.build();
let response = fetch(&mut request, None);
let _ = server.close();
assert_eq!(
response.internal_response.unwrap().status.unwrap().0,
StatusCode::UNAUTHORIZED
);
}
#[test]
fn test_origin_set() {
let origin_header = Arc::new(Mutex::new(None));
let origin_header_clone = origin_header.clone();
let handler = move |request: HyperRequest<Body>, resp: &mut HyperResponse<Body>| {
let origin_header_clone = origin_header.clone();
resp.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
match request.headers().typed_get::<Origin>() {
None => assert_eq!(origin_header_clone.lock().unwrap().take(), None),
Some(h) => assert_eq!(h, origin_header_clone.lock().unwrap().take().unwrap()),
}
};
let (server, url) = make_server(handler);
let mut origin =
Origin::try_from_parts(url.scheme(), url.host_str().unwrap(), url.port()).unwrap();
*origin_header_clone.lock().unwrap() = Some(origin.clone());
let mut request = RequestBuilder::new(url.clone())
.method(Method::POST)
.body(None)
.origin(url.clone().origin())
.build();
let response = fetch(&mut request, None);
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
let origin_url = ServoUrl::parse("http://example.com").unwrap();
origin =
Origin::try_from_parts(origin_url.scheme(), origin_url.host_str().unwrap(), None).unwrap();
// Test Origin header is set on Get request with CORS mode
let mut request = RequestBuilder::new(url.clone())
.method(Method::GET)
.mode(RequestMode::CorsMode)
.body(None)
.origin(origin_url.clone().origin())
.build();
*origin_header_clone.lock().unwrap() = Some(origin.clone());
let response = fetch(&mut request, None);
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
// Test Origin header is not set on method Head
let mut request = RequestBuilder::new(url.clone())
.method(Method::HEAD)
.body(None)
.origin(url.clone().origin())
.build();
*origin_header_clone.lock().unwrap() = None;
let response = fetch(&mut request, None);
assert!(response
.internal_response
.unwrap()
.status
.unwrap()
.0
.is_success());
let _ = server.close();
}
#[test]
fn test_determine_request_referrer_shorter_than_4k() {
let mut headers = HeaderMap::new();
let referrer_source =
ServoUrl::parse("http://username:[email protected]/such/short/referer?query#fragment")
.unwrap();
let current_url = ServoUrl::parse("http://example.com/current/url").unwrap();
let referer = determine_request_referrer(
&mut headers,
ReferrerPolicy::UnsafeUrl,
referrer_source,
current_url,
);
assert_eq!(
referer.unwrap().as_str(),
"http://example.com/such/short/referer?query"
);
}
#[test]
fn test_determine_request_referrer_longer_than_4k() {
let long_url_str = format!(
"http://username:[email protected]/such/{}/referer?query#fragment",
"long".repeat(1024)
);
let mut headers = HeaderMap::new();
let referrer_source = ServoUrl::parse(&long_url_str).unwrap();
let current_url = ServoUrl::parse("http://example.com/current/url").unwrap();
let referer = determine_request_referrer(
&mut headers,
ReferrerPolicy::UnsafeUrl,
referrer_source,
current_url,
);
assert_eq!(referer.unwrap().as_str(), "http://example.com/");
}
<|file_name|>chance.py<|end_file_name|>
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Chance (Random) Scheduler implementation
"""
import random
from oslo.config import cfg
from nova import exception
from nova.scheduler import driver
CONF = cfg.CONF
CONF.import_opt('compute_topic', 'nova.compute.rpcapi')
class ChanceScheduler(driver.Scheduler):
"""Implements Scheduler as a random node selector."""
def _filter_hosts(self, request_spec, hosts, filter_properties):
"""Filter a list of hosts based on request_spec."""
ignore_hosts = filter_properties.get('ignore_hosts', [])
hosts = [host for host in hosts if host not in ignore_hosts]
return hosts
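
    # Selection below is uniformly random over the filtered hosts; this driver
    # intentionally applies no weighting or load awareness.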
def _schedule(self, context, topic, request_spec, filter_properties):
"""Picks a host that is up at random."""
elevated = context.elevated()
hosts = self.hosts_up(elevated, topic)
if not hosts:
msg = _("Is the appropriate service running?")
raise exception.NoValidHost(reason=msg)
        hosts = self._filter_hosts(request_spec, hosts, filter_properties)
        if not hosts:
            msg = _("Could not find another compute")
raise exception.NoValidHost(reason=msg)
return random.choice(hosts)
def select_hosts(self, context, request_spec, filter_properties):
"""Selects a set of random hosts."""
hosts = [self._schedule(context, CONF.compute_topic,
request_spec, filter_properties)
for instance_uuid in request_spec.get('instance_uuids', [])]
if not hosts:
raise exception.NoValidHost(reason="")
return hosts
def schedule_run_instance(self, context, request_spec,
admin_password, injected_files,
requested_networks, is_first_time,
filter_properties):
"""Create and run an instance or instances."""
instance_uuids = request_spec.get('instance_uuids')
for num, instance_uuid in enumerate(instance_uuids):
request_spec['instance_properties']['launch_index'] = num
try:
host = self._schedule(context, CONF.compute_topic,
request_spec, filter_properties)
updated_instance = driver.instance_update_db(context,
instance_uuid)
self.compute_rpcapi.run_instance(context,
instance=updated_instance, host=host,
requested_networks=requested_networks,
injected_files=injected_files,
admin_password=admin_password,
is_first_time=is_first_time,
request_spec=request_spec,
filter_properties=filter_properties)
except Exception as ex:
# NOTE(vish): we don't reraise the exception here to make sure
# that all instances in the request get set to
# error properly
driver.handle_schedule_error(context, ex, instance_uuid,
request_spec)
def schedule_prep_resize(self, context, image, request_spec,
filter_properties, instance, instance_type,
reservations):
"""Select a target for resize."""
host = self._schedule(context, CONF.compute_topic, request_spec,
filter_properties)
self.compute_rpcapi.prep_resize(context, image, instance,
                                        instance_type, host, reservations)
<|file_name|>test_rowcount.py<|end_file_name|>
from sqlalchemy import *
from test.lib import *
class FoundRowsTest(fixtures.TestBase, AssertsExecutionResults):
"""tests rowcount functionality"""
__requires__ = ('sane_rowcount', )
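    # With sane_rowcount, UPDATE/DELETE report *matched* rows, so an UPDATE that
    # changes no values still counts them (see test_update_rowcount2).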
@classmethod
def setup_class(cls):
global employees_table, metadata
metadata = MetaData(testing.db)
employees_table = Table('employees', metadata,
Column('employee_id', Integer,
Sequence('employee_id_seq', optional=True),
primary_key=True),
Column('name', String(50)),
Column('department', String(1)),
)
metadata.create_all()
def setup(self):
global data
data = [ ('Angela', 'A'),
('Andrew', 'A'),
('Anand', 'A'),
('Bob', 'B'),
('Bobette', 'B'),
('Buffy', 'B'),
('Charlie', 'C'),
                 ('Cynthia', 'C'),
                 ('Chris', 'C') ]

        i = employees_table.insert()
        i.execute(*[{'name':n, 'department':d} for n, d in data])

    def teardown(self):
employees_table.delete().execute()
@classmethod
def teardown_class(cls):
metadata.drop_all()
def testbasic(self):
s = employees_table.select()
r = s.execute().fetchall()
assert len(r) == len(data)
def test_update_rowcount1(self):
# WHERE matches 3, 3 rows changed
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='Z')
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
def test_update_rowcount2(self):
# WHERE matches 3, 0 rows changed
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='C')
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
def test_delete_rowcount(self):
# WHERE matches 3, 3 rows deleted
department = employees_table.c.department
r = employees_table.delete(department=='C').execute()
print "expecting 3, dialect reports %s" % r.rowcount
        assert r.rowcount == 3
<|file_name|>gunicorn_config.py<|end_file_name|>
# Refer to the following link for help:
# http://docs.gunicorn.org/en/latest/settings.html
command = '/home/lucas/www/reddit.lucasou.com/reddit-env/bin/gunicorn'
pythonpath = '/home/lucas/www/reddit.lucasou.com/reddit-env/flask_reddit'
bind = '127.0.0.1:8040'
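# Bound to localhost only; presumably a front-end proxy (e.g. nginx) forwards to this port.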
workers = 1
user = 'lucas'
accesslog = '/home/lucas/logs/reddit.lucasou.com/gunicorn-access.log'
errorlog = '/home/lucas/logs/reddit.lucasou.com/gunicorn-error.log' |
<|file_name|>L0FailsIfThereIsMkdirError.ts<|end_file_name|>
import fs = require('fs');
import mockanswer = require('azure-pipelines-task-lib/mock-answer');
import mockrun = require('azure-pipelines-task-lib/mock-run');
import path = require('path');
let taskPath = path.join(__dirname, '..', 'copyfiles.js');
let runner: mockrun.TaskMockRunner = new mockrun.TaskMockRunner(taskPath);
runner.setInput('Contents', '**');
runner.setInput('SourceFolder', path.normalize('/srcDir'));
runner.setInput('TargetFolder', path.normalize('/destDir'));
runner.setInput('CleanTargetFolder', 'false');
runner.setInput('ignoreMakeDirErrors', 'false');
runner.setInput('Overwrite', 'false');
let answers = <mockanswer.TaskLibAnswers> {
checkPath: { },
find: { },
};
answers.checkPath[path.normalize('/srcDir')] = true;
answers.find[path.normalize('/srcDir')] = [
path.normalize('/srcDir'),
path.normalize('/srcDir/someOtherDir'),
path.normalize('/srcDir/someOtherDir/file1.file'),
path.normalize('/srcDir/someOtherDir/file2.file'),
];
runner.setAnswers(answers);
runner.registerMockExport('stats', (itemPath: string) => {
console.log('##vso[task.debug]stats ' + itemPath);
switch (itemPath) {
case path.normalize('/srcDir/someOtherDir'):
return { isDirectory: () => true };
case path.normalize('/srcDir/someOtherDir/file1.file'):
case path.normalize('/srcDir/someOtherDir/file2.file'):
return { isDirectory: () => false };
default:
throw { code: 'ENOENT' };
}
});
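// Simulate a failing target-folder creation: mkdirP always throws, and with
// ignoreMakeDirErrors set to 'false' the task is expected to error out.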
runner.registerMockExport('mkdirP', (p: string) => {
    console.log(`mkdirP: ${p}`);
throw "Error during creation of target folder."
});
// as a precaution, disable fs.chmodSync. it should not be called during this scenario.
fs.chmodSync = null;
runner.registerMock('fs', fs);
runner.run();
<|file_name|>__init__.py<|end_file_name|>
"""
KeepNote Extension
new_file
Extension allows adding new filetypes to a notebook
"""

#
# KeepNote
# Copyright (c) 2008-2011 Matt Rasmussen
# Author: Matt Rasmussen <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
#
import gettext
import os
import re
import shutil
import sys
import time
import xml.etree.cElementTree as etree
#_ = gettext.gettext
import keepnote
from keepnote import unicode_gtk
from keepnote.notebook import NoteBookError
from keepnote import notebook as notebooklib
from keepnote import tasklib
from keepnote import tarfile
from keepnote.gui import extension
from keepnote.gui import dialog_app_options
# pygtk imports
try:
import pygtk
pygtk.require('2.0')
from gtk import gdk
import gtk.glade
import gobject
except ImportError:
# do not fail on gtk import error,
# extension should be usable for non-graphical uses
pass
class Extension (extension.Extension):
def __init__(self, app):
"""Initialize extension"""
extension.Extension.__init__(self, app)
self.app = app
self._file_types = []
self._default_file_types = [
FileType("Text File (txt)", "untitled.txt", "plain_text.txt"),
FileType("Spreadsheet (xls)", "untitled.xls", "spreadsheet.xls"),
FileType("Word Document (doc)", "untitled.doc", "document.doc")
]
self.enabled.add(self.on_enabled)
def get_filetypes(self):
return self._file_types
def on_enabled(self, enabled):
if enabled:
self.load_config()
def get_depends(self):
return [("keepnote", ">=", (0, 7, 1))]
#===============================
# config handling
def get_config_file(self):
return self.get_data_file("config.xml")
def load_config(self):
config = self.get_config_file()
if not os.path.exists(config):
self.set_default_file_types()
self.save_default_example_files()
self.save_config()
try:
tree = etree.ElementTree(file=config)
# check root
root = tree.getroot()
if root.tag != "file_types":
raise NoteBookError("Root tag is not 'file_types'")
# iterate children
self._file_types = []
for child in root:
if child.tag == "file_type":
filetype = FileType("", "", "")
for child2 in child:
if child2.tag == "name":
filetype.name = child2.text
elif child2.tag == "filename":
filetype.filename = child2.text
elif child2.tag == "example_file":
filetype.example_file = child2.text
self._file_types.append(filetype)
except:
self.app.error("Error reading file type configuration")
self.set_default_file_types()
self.save_config()
def save_config(self):
config = self.get_config_file()
tree = etree.ElementTree(
etree.Element("file_types"))
root = tree.getroot()
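        # Serialized shape (illustrative):
        #   <file_types>
        #     <file_type>
        #       <name>...</name><example_file>...</example_file><filename>...</filename>
        #     </file_type>
        #   </file_types>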
for file_type in self._file_types:
elm = etree.SubElement(root, "file_type")
name = etree.SubElement(elm, "name")
name.text = file_type.name
example = etree.SubElement(elm, "example_file")
example.text = file_type.example_file
filename = etree.SubElement(elm, "filename")
filename.text = file_type.filename
tree.write(open(config, "w"), "UTF-8")
def set_default_file_types(self):
self._file_types = list(self._default_file_types)
def save_default_example_files(self):
base = self.get_base_dir()
data_dir = self.get_data_dir()
for file_type in self._default_file_types:
fn = file_type.example_file
shutil.copy(os.path.join(base, fn), os.path.join(data_dir, fn))
def update_all_menus(self):
for window in self.get_windows():
self.set_new_file_menus(window)
#==============================
# UI
def on_add_ui(self, window):
"""Initialize extension for a particular window"""
# add menu options
self.add_action(window, "New File", "New _File")
#("treeview_popup", None, None),
self.add_ui(window,
"""
<ui>
<menubar name="main_menu_bar">
<menu action="File">
<placeholder name="New">
<menuitem action="New File"/>
</placeholder>
</menu>
</menubar>
<!--
<menubar name="popup_menus">
<menu action="treeview_popup">
<placeholder action="New">
<menuitem action="New File"/>
</placeholder>
</menu>
</menubar>
-->
</ui>
""")
self.set_new_file_menus(window)
#=================================
# Options UI setup
def on_add_options_ui(self, dialog):
dialog.add_section(NewFileSection("new_file",
dialog, self._app,
self),
"extensions")
def on_remove_options_ui(self, dialog):
dialog.remove_section("new_file")
#======================================
# callbacks
def on_new_file(self, window, file_type):
"""Callback from gui to add a new file"""
notebook = window.get_notebook()
if notebook is None:
return
nodes = window.get_selected_nodes()
if len(nodes) == 0:
parent = notebook
else:
sibling = nodes[0]
if sibling.get_parent():
parent = sibling.get_parent()
index = sibling.get_attr("order") + 1
else:
parent = sibling
try:
uri = os.path.join(self.get_data_dir(), file_type.example_file)
node = notebooklib.attach_file(uri, parent)
node.rename(file_type.filename)
window.get_viewer().goto_node(node)
except Exception, e:
window.error("Error while attaching file '%s'." % uri, e)
def on_new_file_type(self, window):
"""Callback from gui for adding a new file type"""
self.app.app_options_dialog.show(window, "new_file")
#==========================================
# menu setup
def set_new_file_menus(self, window):
"""Set the recent notebooks in the file menu"""
menu = window.get_uimanager().get_widget("/main_menu_bar/File/New/New File")
if menu:
self.set_new_file_menu(window, menu)
menu = window.get_uimanager().get_widget("/popup_menus/treeview_popup/New/New File")
if menu:
self.set_new_file_menu(window, menu)
def set_new_file_menu(self, window, menu):
"""Set the recent notebooks in the file menu"""
# TODO: perform lookup of filetypes again
# init menu
if menu.get_submenu() is None:
submenu = gtk.Menu()
submenu.show()
menu.set_submenu(submenu)
menu = menu.get_submenu()
# clear menu
menu.foreach(lambda x: menu.remove(x))
def make_func(file_type):
return lambda w: self.on_new_file(window, file_type)
# populate menu
for file_type in self._file_types:
item = gtk.MenuItem(u"New %s" % file_type.name)
item.connect("activate", make_func(file_type))
item.show()
menu.append(item)
item = gtk.SeparatorMenuItem()
item.show()
menu.append(item)
item = gtk.MenuItem(u"Add New File Type")
item.connect("activate", lambda w: self.on_new_file_type(window))
item.show()
menu.append(item)
#===============================
# actions
def install_example_file(self, filename):
"""Installs a new example file into the extension"""
newpath = self.get_data_dir()
newfilename = os.path.basename(filename)
newfilename, ext = os.path.splitext(newfilename)
newfilename = notebooklib.get_unique_filename(newpath, newfilename,
ext=ext, sep=u"",
number=2)
shutil.copy(filename, newfilename)
return os.path.basename(newfilename)
class FileType (object):
"""Class containing information about a filetype"""
def __init__(self, name, filename, example_file):
self.name = name
self.filename = filename
self.example_file = example_file
def copy(self):
return FileType(self.name, self.filename, self.example_file)
class NewFileSection (dialog_app_options.Section):
"""A Section in the Options Dialog"""
def __init__(self, key, dialog, app, ext,
label=u"New File Types",
icon=None):
dialog_app_options.Section.__init__(self, key, dialog, app, label, icon)
self.ext = ext
self._filetypes = []
self._current_filetype = None
# setup UI
w = self.get_default_widget()
h = gtk.HBox(False, 5)
w.add(h)
# left column (file type list)
v = gtk.VBox(False, 5)
h.pack_start(v, False, True, 0)
self.filetype_store = gtk.ListStore(str, object)
self.filetype_listview = gtk.TreeView(self.filetype_store)
self.filetype_listview.set_headers_visible(False)
self.filetype_listview.get_selection().connect("changed",
self.on_listview_select)
sw = gtk.ScrolledWindow()
sw.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
sw.set_shadow_type(gtk.SHADOW_IN)
sw.add(self.filetype_listview)
sw.set_size_request(160, 200)
v.pack_start(sw, False, True, 0)
# create the treeview column
column = gtk.TreeViewColumn()
self.filetype_listview.append_column(column)
cell_text = gtk.CellRendererText()
column.pack_start(cell_text, True)
column.add_attribute(cell_text, 'text', 0)
# add/del buttons
h2 = gtk.HBox(False, 5)
v.pack_start(h2, False, True, 0)
button = gtk.Button("New")
button.connect("clicked", self.on_new_filetype)
h2.pack_start(button, True, True, 0)
button = gtk.Button("Delete")
button.connect("clicked", self.on_delete_filetype)
h2.pack_start(button, True, True, 0)
# right column (file type editor)
v = gtk.VBox(False, 5)
h.pack_start(v, False, True, 0)
table = gtk.Table(3, 2)
self.filetype_editor = table
v.pack_start(table, False, True, 0)
# file type name
label = gtk.Label("File type name:")
table.attach(label, 0, 1, 0, 1,
xoptions=0, yoptions=0,
xpadding=2, ypadding=2)
self.filetype = gtk.Entry()
table.attach(self.filetype, 1, 2, 0, 1,
xoptions=gtk.FILL, yoptions=0,
xpadding=2, ypadding=2)
# default filename
label = gtk.Label("Default filename:")
table.attach(label, 0, 1, 1, 2,
xoptions=0, yoptions=0,
xpadding=2, ypadding=2)
self.filename = gtk.Entry()
table.attach(self.filename, 1, 2, 1, 2,
xoptions=gtk.FILL, yoptions=0,
xpadding=2, ypadding=2)
# example new file
label = gtk.Label("Example new file:")
table.attach(label, 0, 1, 2, 3,
xoptions=0, yoptions=0,
xpadding=2, ypadding=2)
self.example_file = gtk.Entry()
table.attach(self.example_file, 1, 2, 2, 3,
xoptions=gtk.FILL, yoptions=0,
xpadding=2, ypadding=2)
# browse button
button = gtk.Button(_("Browse..."))
button.set_image(
gtk.image_new_from_stock(gtk.STOCK_OPEN,
gtk.ICON_SIZE_SMALL_TOOLBAR))
button.show()
button.connect("clicked", lambda w:
dialog_app_options.on_browse(
w.get_toplevel(), "Choose Example New File", "",
self.example_file))
table.attach(button, 1, 2, 3, 4,
xoptions=gtk.FILL, yoptions=0,
xpadding=2, ypadding=2)
w.show_all()
self.set_filetypes()
self.set_filetype_editor(None)
def load_options(self, app):
"""Load options from app to UI"""
self._filetypes = [x.copy() for x in self.ext.get_filetypes()]
self.set_filetypes()
self.filetype_listview.get_selection().unselect_all()
def save_options(self, app):
"""Save options to the app"""
self.save_current_filetype()
# install example files
bad = []
for filetype in self._filetypes:
if os.path.isabs(filetype.example_file):
# copy new file into extension data dir
try:
filetype.example_file = self.ext.install_example_file(
filetype.example_file)
except Exception, e:
app.error("Cannot install example file '%s'" %
filetype.example_file, e)
bad.append(filetype)
# update extension state
self.ext.get_filetypes()[:] = [x.copy() for x in self._filetypes
if x not in bad]
self.ext.save_config()
self.ext.update_all_menus()
def set_filetypes(self):
"""Initialize the lisview to the loaded filetypes"""
self.filetype_store.clear()
for filetype in self._filetypes:
self.filetype_store.append([filetype.name, filetype])
def set_filetype_editor(self, filetype):
"""Update editor with current filetype"""
if filetype is None:
self._current_filetype = None
self.filetype.set_text("")
self.filename.set_text("")
self.example_file.set_text("")
self.filetype_editor.set_sensitive(False)
else:
self._current_filetype = filetype
self.filetype.set_text(filetype.name)
self.filename.set_text(filetype.filename)
self.example_file.set_text(filetype.example_file)
self.filetype_editor.set_sensitive(True)
def save_current_filetype(self):
"""Save the contents of the editor into the current filetype object"""
if self._current_filetype:
self._current_filetype.name = self.filetype.get_text()
self._current_filetype.filename = self.filename.get_text()
self._current_filetype.example_file = self.example_file.get_text()
# update filetype list
for row in self.filetype_store:
if row[1] == self._current_filetype:
row[0] = self._current_filetype.name
def on_listview_select(self, selection):
"""Callback for when listview selection changes"""
model, it = self.filetype_listview.get_selection().get_selected()
self.save_current_filetype()
# set editor to current selection
if it is not None:
filetype = self.filetype_store[it][1]
self.set_filetype_editor(filetype)
else:
self.set_filetype_editor(None)
def on_new_filetype(self, button):
"""Callback for adding a new filetype"""
self._filetypes.append(FileType(u"New File Type", u"untitled", ""))
self.set_filetypes()
self.filetype_listview.set_cursor((len(self._filetypes)-1,))
def on_delete_filetype(self, button):
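        """Callback for deleting the currently selected filetype"""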
model, it = self.filetype_listview.get_selection().get_selected()
if it is not None:
filetype = self.filetype_store[it][1]
self._filetypes.remove(filetype)
self.set_filetypes()<|fim▁end|> | """
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(inclusive_range_syntax)]
#![cfg_attr(test, feature(test))]
#[cfg(test)]
<|fim▁hole|>pub mod search;
pub mod sort;
pub mod data_structures;<|fim▁end|> | extern crate test;
pub mod math;
pub mod select; |
<|file_name|>Draw.hpp<|end_file_name|><|fim▁begin|>/*
Copyright_License {
XCSoar Glide Computer - http://www.xcsoar.org/
Copyright (C) 2000-2015 The XCSoar Project
A detailed list of copyright holders can be found in the file "AUTHORS".
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
}
*/
#ifndef XCSOAR_FORM_DRAW_HPP
#define XCSOAR_FORM_DRAW_HPP
#include "Screen/PaintWindow.hpp"
#include <functional>
class ContainerWindow;
/**
* This class is used for creating custom drawn content.
* It is based on the WindowControl class.<|fim▁hole|>
public:
template<typename CB>
void Create(ContainerWindow &parent,
PixelRect rc, const WindowStyle style,
CB &&_paint) {
    mOnPaintCallback = std::forward<CB>(_paint);
PaintWindow::Create(parent, rc, style);
}
protected:
/**
* The callback function for painting the content of the control
* @see SetOnPaintNotify()
*/
std::function<void(Canvas &canvas, const PixelRect &rc)> mOnPaintCallback;
/** from class PaintWindow */
virtual void OnPaint(Canvas &canvas) override;
};
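/*
 * Usage sketch (illustrative only, not taken from XCSoar itself;
 * `parent`, `rc` and `style` are assumed to be in scope):
 *
 *   WndOwnerDrawFrame frame;
 *   frame.Create(parent, rc, style,
 *                [](Canvas &canvas, const PixelRect &rc) {
 *                  // custom painting for this frame goes here
 *                });
 */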
#endif<|fim▁end|> | */
class WndOwnerDrawFrame : public PaintWindow {
public:
typedef void (*OnPaintCallback_t)(Canvas &canvas, const PixelRect &rc); |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# <|fim▁hole|># This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>, 2008.
#
# Version: 1.0
#
from django.conf.urls import *
from omeroweb.webstart import views
urlpatterns = patterns('django.views.generic.simple',
url( r'^$', views.index, name="webstart_index" ),
url( r'^jars/insight\.jnlp$', views.insight, name='webstart_insight'),
)<|fim▁end|> | # Copyright (c) 2008 University of Dundee.
# |
<|file_name|>0006_auto_20150612_2307.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.db import models, migrations
<|fim▁hole|>
class Migration(migrations.Migration):
dependencies = [
('entries', '0005_resultsmode_json'),
]
operations = [
migrations.AlterField(
model_name='resultsmode',
name='json',
field=models.TextField(default='', blank=True),
),
]<|fim▁end|> | |
<|file_name|>cte.js<|end_file_name|><|fim▁begin|>'use strict';
const { expect } = require('chai');
const { getParsedSql } = require('./util');
describe('common table expressions', () => {
it('should support single CTE', () => {
const sql = `
WITH cte AS (SELECT 1)
SELECT * FROM cte
`.trim();
expect(getParsedSql(sql)).to.equal('WITH "cte" AS (SELECT 1) SELECT * FROM "cte"');
});
it('should support multiple CTE', () => {
const expected = 'WITH "cte1" AS (SELECT 1), "cte2" AS (SELECT 2) ' +
'SELECT * FROM "cte1" UNION SELECT * FROM "cte2"';
const sql = `
WITH cte1 AS (SELECT 1), cte2 AS (SELECT 2)
SELECT * FROM cte1 UNION SELECT * FROM cte2
`.trim();
        expect(getParsedSql(sql)).to.equal(expected);
});
it('should support CTE with column', () => {
const sql = `
WITH cte (col1) AS (SELECT 1)
SELECT * FROM cte
`.trim();
expect(getParsedSql(sql)).to.contain('(col1)');
});
<|fim▁hole|> const sql = `
WITH cte (col1, col2) AS (SELECT 1, 2)
SELECT * FROM cte
`.trim();
expect(getParsedSql(sql)).to.contain('(col1, col2)');
});
it('should support recursive CTE', () => {
const sql = `
WITH RECURSIVE cte(n) AS
(
SELECT 1
UNION
SELECT n + 1 FROM cte WHERE n < 5
)
SELECT * FROM cte
`.trim();
expect(getParsedSql(sql)).to.match(/^WITH RECURSIVE/);
});
});<|fim▁end|> | it('should support CTE with multiple columns', () => { |
<|file_name|>value.rs<|end_file_name|><|fim▁begin|>use crate::ast::InputValue;
use crate::{
parser::{ParseError, ParseResult, Parser, ScalarToken, SourcePosition, Spanning, Token},
schema::{<|fim▁hole|> meta::{InputObjectMeta, MetaType},
model::SchemaType,
},
value::ScalarValue,
};
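/// Parses an input value literal. When the expected meta type `tpe` is known,
/// it is used to drive scalar coercion and input object field typing.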
pub fn parse_value_literal<'a, 'b, S>(
parser: &mut Parser<'a>,
is_const: bool,
schema: &'b SchemaType<'b, S>,
tpe: Option<&MetaType<'b, S>>,
) -> ParseResult<'a, InputValue<S>>
where
S: ScalarValue,
{
match (parser.peek(), tpe) {
(
&Spanning {
item: Token::BracketOpen,
..
},
_,
) => parse_list_literal(parser, is_const, schema, tpe),
(
&Spanning {
item: Token::CurlyOpen,
..
},
None,
) => parse_object_literal(parser, is_const, schema, None),
(
&Spanning {
item: Token::CurlyOpen,
..
},
Some(&MetaType::InputObject(ref o)),
) => parse_object_literal(parser, is_const, schema, Some(o)),
(
&Spanning {
item: Token::Dollar,
..
},
_,
) if !is_const => parse_variable_literal(parser),
(
&Spanning {
item: Token::Scalar(_),
..
},
Some(&MetaType::Scalar(ref s)),
) => {
if let Spanning {
item: Token::Scalar(scalar),
start,
end,
} = parser.next_token()?
{
(s.parse_fn)(scalar)
.map(|s| Spanning::start_end(&start, &end, InputValue::Scalar(s)))
.or_else(|_| parse_scalar_literal_by_infered_type(scalar, &start, &end, schema))
} else {
unreachable!()
}
}
(
&Spanning {
item: Token::Scalar(_),
..
},
_,
) => {
if let Spanning {
item: Token::Scalar(token),
start,
end,
} = parser.next_token()?
{
parse_scalar_literal_by_infered_type(token, &start, &end, schema)
} else {
unreachable!()
}
}
(
&Spanning {
item: Token::Name("true"),
..
},
_,
) => Ok(parser.next_token()?.map(|_| InputValue::scalar(true))),
(
&Spanning {
item: Token::Name("false"),
..
},
_,
) => Ok(parser.next_token()?.map(|_| InputValue::scalar(false))),
(
&Spanning {
item: Token::Name("null"),
..
},
_,
) => Ok(parser.next_token()?.map(|_| InputValue::null())),
(
&Spanning {
item: Token::Name(name),
..
},
_,
) => Ok(parser.next_token()?.map(|_| InputValue::enum_value(name))),
_ => Err(parser.next_token()?.map(ParseError::UnexpectedToken)),
}
}
fn parse_list_literal<'a, 'b, S>(
parser: &mut Parser<'a>,
is_const: bool,
schema: &'b SchemaType<'b, S>,
tpe: Option<&MetaType<'b, S>>,
) -> ParseResult<'a, InputValue<S>>
where
S: ScalarValue,
{
Ok(parser
.delimited_list(
&Token::BracketOpen,
|p| parse_value_literal(p, is_const, schema, tpe),
&Token::BracketClose,
)?
.map(InputValue::parsed_list))
}
fn parse_object_literal<'a, 'b, S>(
parser: &mut Parser<'a>,
is_const: bool,
schema: &'b SchemaType<'b, S>,
object_tpe: Option<&InputObjectMeta<'b, S>>,
) -> ParseResult<'a, InputValue<S>>
where
S: ScalarValue,
{
Ok(parser
.delimited_list(
&Token::CurlyOpen,
|p| parse_object_field(p, is_const, schema, object_tpe),
&Token::CurlyClose,
)?
.map(|items| InputValue::parsed_object(items.into_iter().map(|s| s.item).collect())))
}
fn parse_object_field<'a, 'b, S>(
parser: &mut Parser<'a>,
is_const: bool,
schema: &'b SchemaType<'b, S>,
object_meta: Option<&InputObjectMeta<'b, S>>,
) -> ParseResult<'a, (Spanning<String>, Spanning<InputValue<S>>)>
where
S: ScalarValue,
{
let key = parser.expect_name()?;
let tpe = object_meta
.and_then(|o| o.input_fields.iter().find(|f| f.name == key.item))
.and_then(|f| schema.lookup_type(&f.arg_type));
parser.expect(&Token::Colon)?;
let value = parse_value_literal(parser, is_const, schema, tpe)?;
Ok(Spanning::start_end(
&key.start,
&value.end.clone(),
(key.map(|s| s.to_owned()), value),
))
}
fn parse_variable_literal<'a, S>(parser: &mut Parser<'a>) -> ParseResult<'a, InputValue<S>>
where
S: ScalarValue,
{
let Spanning {
start: start_pos, ..
} = parser.expect(&Token::Dollar)?;
let Spanning {
item: name,
end: end_pos,
..
} = parser.expect_name()?;
Ok(Spanning::start_end(
&start_pos,
&end_pos,
InputValue::variable(name),
))
}
fn parse_scalar_literal_by_infered_type<'a, 'b, S>(
token: ScalarToken<'a>,
start: &SourcePosition,
end: &SourcePosition,
schema: &'b SchemaType<'b, S>,
) -> ParseResult<'a, InputValue<S>>
where
S: ScalarValue,
{
let result = match token {
ScalarToken::String(_) => {
if let Some(&MetaType::Scalar(ref s)) = schema.concrete_type_by_name("String") {
(s.parse_fn)(token).map(InputValue::Scalar)
} else {
Err(ParseError::ExpectedScalarError(
"There needs to be a String type",
))
}
}
ScalarToken::Int(_) => {
if let Some(&MetaType::Scalar(ref s)) = schema.concrete_type_by_name("Int") {
(s.parse_fn)(token).map(InputValue::Scalar)
} else {
Err(ParseError::ExpectedScalarError(
"There needs to be an Int type",
))
}
}
ScalarToken::Float(_) => {
if let Some(&MetaType::Scalar(ref s)) = schema.concrete_type_by_name("Float") {
(s.parse_fn)(token).map(InputValue::Scalar)
} else {
Err(ParseError::ExpectedScalarError(
"There needs to be a Float type",
))
}
}
};
result
.map(|s| Spanning::start_end(start, end, s))
.map_err(|e| Spanning::start_end(start, end, e))
}<|fim▁end|> | |
<|file_name|>core-export-f64-sqrt.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>
let digits: uint = 10 as uint;
::core::io::println(float::to_str_digits(f64::sqrt(42.0f64) as float, digits));
}<|fim▁end|> | // Regression test that f64 exports things properly
pub fn main() { |
<|file_name|>api-endpoints.js<|end_file_name|><|fim▁begin|>module.exports = function (client) {
const baseUrl = '/api-endpoints/';<|fim▁hole|> .send(endpointConfig)
.then(res => res.body);
},
update (name, endpointConfig) {
return this.create(name, endpointConfig);
},
remove (name) {
return client
.del(`${baseUrl}${encodeURIComponent(name)}`)
.then(res => res.body);
},
info (name) {
return client
.get(`${baseUrl}${encodeURIComponent(name)}`)
.then(res => res.body);
},
list () {
return client
.get(baseUrl)
.then(res => res.body);
}
};
};<|fim▁end|> | return {
create (name, endpointConfig) {
return client
.put(`${baseUrl}${encodeURIComponent(name)}`) |
<|file_name|>io-queue.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|>size 3654<|fim▁end|> | oid sha256:5f5740cfcc24e2a730f7ea590ae0dc07d66d256fd183c46facf3fdfeb0bd69d2 |
<|file_name|>unix.rs<|end_file_name|><|fim▁begin|>#![cfg(any(
target_os = "linux",
target_os = "dragonfly",
target_os = "freebsd",
target_os = "netbsd",
target_os = "openbsd",
))]
use crate::platform::ContextTraitExt;
pub use crate::platform_impl::{HeadlessContextExt, RawContextExt, RawHandle};
use crate::{Context, ContextCurrentState};
pub use glutin_egl_sys::EGLContext;
#[cfg(feature = "x11")]
pub use glutin_glx_sys::GLXContext;
pub use winit::platform::unix::*;
use std::os::raw;
impl<T: ContextCurrentState> ContextTraitExt for Context<T> {
type Handle = RawHandle;
#[inline]
unsafe fn raw_handle(&self) -> Self::Handle {
self.context.raw_handle()
}
#[inline]<|fim▁hole|> }
}<|fim▁end|> | unsafe fn get_egl_display(&self) -> Option<*const raw::c_void> {
self.context.get_egl_display() |
<|file_name|>services_test.go<|end_file_name|><|fim▁begin|>package services
import (
"github.com/stretchr/testify/assert"
"testing"
)
func TestReadJsonConfFileService(t *testing.T) {
confFilename := "../testdata/fixtures/system.json"
f, _ := ReadJsonConfFileService(confFilename)
if len(f.Files) < 1 {
t.Fatal("no Files found in " + confFilename)
}
<|fim▁hole|> actual = f.Files[i].Pattern
break
}
expected := "installed.*"
assert.Equal(t, expected, actual, "unexpected config pattern")
}<|fim▁end|> | var actual string = ""
for i := 0; i < len(f.Files); i++ { |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//!
//! A binding for the library `SDL2_gfx`
//!
//!
//! Note that you need to build with the
//! feature `gfx` for this module to be enabled,
//! like so:
//!
//! ```bash
//! $ cargo build --features "gfx"
//! ```
//!
//! If you want to use this from inside your own
//! crate, you will need to add this in your Cargo.toml<|fim▁hole|>//! version = ...
//! default-features = false
//! features = ["gfx"]
//! ```
pub mod framerate;
pub mod imagefilter;
pub mod primitives;
pub mod rotozoom;<|fim▁end|> | //!
//! ```toml
//! [dependencies.sdl2] |
<|file_name|>roman_numerals3.py<|end_file_name|><|fim▁begin|># https://www.codewars.com/kata/roman-numerals-encoder/train/python
def solution(n):
# Print the arguments
print('n = {}'.format(n))
roman_blueprint = [] # List of lists to store format structure: [[<letter>, <count>], ...]
result = [] # Store the final roman numeral result
# Calculate for roman numerals
# Calculate the I's -- 1
numI = n % 5 # Do modulo 5, since we want to look at the I's
if (numI == 4): # If the modul result is 4, then we just want 1 roman numeral, before the previous digit
numI = -1 # Set it to '-1' to track the position, and we can use abs() to get the value/count
roman_blueprint.append(['I', numI])
# Calculate the V's -- 5
numV = n % 10 # Do modulo 10, since we want to look at the V's
if (4 <= numV <= 8):
numV = 1
else:
numV = 0
roman_blueprint.append(['V', numV])
# Calculate the X's -- 10
numX = n % 50 # Do modulo 10, since we want to look at the X's
if (numX <= 8):
numX = 0
elif (9 <= numX <= 18): # (0 * 10) + 0 <= (numX - 9) <= (0 * 10) + 9
numX = 1
elif (19 <= numX <= 28): # (1 * 10) + 0 <= (numX - 9) <= (1 * 10) + 9
numX = 2
elif (29 <= numX <= 38): # (2 * 10) + 0 <= (numX - 9) <= (2 * 10) + 9
numX = 3
elif (numX == 39): # (numX - 9) == (3 * 10) + 0
numX = 4
elif (40 <= numX <= 48): # (3 * 10) + 1 <= (numX - 9) <= (3 * 10) + 9
numX = -1
else: # (numX - 9) == (4 * 10) + 0
numX = -2
roman_blueprint.append(['X', numX])
# Calculate the L's -- 50
numL = n % 100 # Do modulo 100, since we want to look at the L's
if (40 <= numL <= 89):
numL = 1
else:
numL = 0
roman_blueprint.append(['L', numL])
# Calculate the C's -- 100
numC = n % 500 # Do modulo 10, since we want to look at the C's
if (numC <= 89):
numC = 0
elif (90 <= numC <= 189): # (0 * 100) + 0 <= (numC - 90) <= (0 * 100) + 99
numC = 1
elif (190 <= numC <= 289): # (1 * 100) + 0 <= (numC - 90) <= (1 * 100) + 99
numC = 2
elif (290 <= numC <= 389): # (2 * 100) + 0 <= (numC - 90) <= (2 * 100) + 99
numC = 3
elif (390 <= numC <= 399): # (3 * 100) + 0 <= (numC - 90) <= (3 * 100) + 9
numC = 4
elif (400 <= numC <= 489): # (3 * 100) + 10 <= (numC - 90) <= (3 * 100) + 99
numC = -1
else: # (4 * 100) + 0 <= (numC - 90) <= (4 * 100) + 9
numC = -2
roman_blueprint.append(['C', numC])
# Calculate the D's -- 500
numD = n % 1000 # Do modulo 1000, since we want to look at the D's
if (400 <= numD <= 899):
numD = 1
else:
numD = 0
roman_blueprint.append(['D', numD])
# Calculate the M's -- 1000
numM = n % 5000 # Do modulo 10, since we want to look at the M's
if (numM <= 899):
numM = 0
elif (900 <= numM <= 1899): # (0 * 1000) + 0 <= (numM - 900) <= (0 * 1000) + 999
numM = 1
elif (1900 <= numM <= 2899): # (1 * 1000) + 0 <= (numM - 900) <= (1 * 1000) + 999
numM = 2
elif (2900 <= numM <= 3899): # (2 * 1000) + 0 <= (numM - 900) <= (2 * 1000) + 999
numM = 3
elif (3900 <= numM <= 4899): # (3 * 1000) + 0 <= (numM - 900) <= (3 * 1000) + 99
numM = 4
else:
numM = 5
roman_blueprint.append(['M', numM])
# Format the output
# Format from largest to smallest
for numeral, count in roman_blueprint[::-1]:
if (count < 0): # We have an M to be used for subtraction
result.insert(-1, numeral)
count = abs(count + 1) # Increment number of positive M's to add, take the ABS value<|fim▁hole|> result.extend([numeral] * count)
# Join the final result
result = ''.join(result)
# Show the final result
print('result = {}'.format(result))
return result<|fim▁end|> | if (count >= 0): # If we have M's |
<|file_name|>0082_global_disaggregations_data_migration.py<|end_file_name|><|fim▁begin|># Generated by Django 2.2.5 on 2019-12-26 20:45
from django.db import migrations, models
import django.db.models.deletion
def non_reversible_migration(apps, schema_editor):
"""Operation to "reverse" an unreversible change"""
pass
def remove_non_sadd_disaggregations(apps, schema_editor):
DisaggregationType = apps.get_model('indicators', 'DisaggregationType')
standard_disaggregations = DisaggregationType.objects.filter(standard=True, is_archived=False)
for standard_disaggregation in standard_disaggregations:
if standard_disaggregation.disaggregation_type == '---':
standard_disaggregation.delete()
elif standard_disaggregation.country_id == 6:
standard_disaggregation.standard = False
standard_disaggregation.save()
elif standard_disaggregation.disaggregation_type == 'SADD - MC Standard':
standard_disaggregation.disaggregation_type = 'Sex and Age Disaggregated Data (SADD)'
standard_disaggregation.selected_by_default = True
standard_disaggregation.save()
def assign_sadd_to_all_existing_indicators(apps, schema_editor):
Indicator = apps.get_model('indicators', 'Indicator')
DisaggregationType = apps.get_model('indicators', 'DisaggregationType')<|fim▁hole|> sadd_disagg = DisaggregationType.objects.get(pk=109)
except DisaggregationType.DoesNotExist:
return
for indicator in Indicator.objects.all():
indicator.disaggregation.add(sadd_disagg)
class Migration(migrations.Migration):
dependencies = [
('indicators', '0081_remove_legacy_disaggregationvalue'),
]
operations = [
migrations.AlterField(
model_name='disaggregationtype',
name='country',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Country', verbose_name='Country'),
),
migrations.RunPython(remove_non_sadd_disaggregations, non_reversible_migration),
migrations.RunPython(assign_sadd_to_all_existing_indicators, non_reversible_migration),
]<|fim▁end|> | try: |
<|file_name|>gc_engine.py<|end_file_name|><|fim▁begin|># -*- encoding: UTF-8 -*-
import re
import sys
import os
import traceback
from ..ibdawg import IBDAWG
from ..echo import echo
from . import gc_options
__all__ = [ "lang", "locales", "pkg", "name", "version", "author", \
"load", "parse", "getDictionary", \
"setOptions", "getOptions", "getOptionsLabels", "resetOptions", \
"ignoreRule", "resetIgnoreRules" ]
__version__ = u"${version}"
lang = u"${lang}"
locales = ${loc}
pkg = u"${implname}"
name = u"${name}"
version = u"${version}"
author = u"${author}"
# commons regexes
_zEndOfSentence = re.compile(u'([.?!:;…][ .?!… »”")]*|.$)')
_zBeginOfParagraph = re.compile(u"^\W*")
_zEndOfParagraph = re.compile(u"\W*$")
_zNextWord = re.compile(u" +(\w[\w-]*)")
_zPrevWord = re.compile(u"(\w[\w-]*) +$")
# grammar rules and dictionary
_rules = None
_dOptions = dict(gc_options.dOpt) # duplication necessary, to be able to reset to default
_aIgnoredRules = set()
_oDict = None
_dAnalyses = {} # cache for data from dictionary
_GLOBALS = globals()
#### Parsing
def parse (sText, sCountry="${country_default}", bDebug=False, dOptions=None):
"analyses the paragraph sText and returns list of errors"
aErrors = None
sAlt = sText
dDA = {}
dOpt = _dOptions if not dOptions else dOptions
# parse paragraph
try:
sNew, aErrors = _proofread(sText, sAlt, 0, True, dDA, sCountry, dOpt, bDebug)
if sNew:
sText = sNew
except:
raise
# parse sentences
for iStart, iEnd in _getSentenceBoundaries(sText):
if 4 < (iEnd - iStart) < 2000:
dDA.clear()
try:
_, errs = _proofread(sText[iStart:iEnd], sAlt[iStart:iEnd], iStart, False, dDA, sCountry, dOpt, bDebug)
aErrors.extend(errs)
except:
raise
return aErrors
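# Usage sketch (illustrative; the ${...} placeholders in this template are
# filled in by the Grammalecte build step, so the module is not runnable as-is):
#   load()
#   for dErr in parse(u"Text to check."):
#       echo(dErr["sMessage"])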
def _getSentenceBoundaries (sText):
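    "yields the (start, end) boundaries of each sentence in sText"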
iStart = _zBeginOfParagraph.match(sText).end()
for m in _zEndOfSentence.finditer(sText):
yield (iStart, m.end())
iStart = m.end()
def _proofread (s, sx, nOffset, bParagraph, dDA, sCountry, dOptions, bDebug):
aErrs = []
bChange = False
if not bParagraph:
# after the first pass, we modify automatically some characters
if u" " in s:
s = s.replace(u" ", u' ') # nbsp
bChange = True
if u" " in s:
s = s.replace(u" ", u' ') # nnbsp
bChange = True
if u"@" in s:
s = s.replace(u"@", u' ')
bChange = True
if u"'" in s:
s = s.replace(u"'", u"’")
bChange = True
if u"‑" in s:
s = s.replace(u"‑", u"-") # nobreakdash
bChange = True
bIdRule = option('idrule')
for sOption, lRuleGroup in _getRules(bParagraph):
if not sOption or dOptions.get(sOption, False):
for zRegex, bUppercase, sRuleId, lActions in lRuleGroup:
if sRuleId not in _aIgnoredRules:
for m in zRegex.finditer(s):
for sFuncCond, cActionType, sWhat, *eAct in lActions:
# action in lActions: [ condition, action type, replacement/suggestion/action[, iGroup[, message, URL]] ]
try:
if not sFuncCond or _GLOBALS[sFuncCond](s, sx, m, dDA, sCountry):
if cActionType == "-":
# grammar error
# (text, replacement, nOffset, m, iGroup, sId, bUppercase, sURL, bIdRule)
aErrs.append(_createError(s, sWhat, nOffset, m, eAct[0], sRuleId, bUppercase, eAct[1], eAct[2], bIdRule, sOption))
elif cActionType == "~":
# text processor
s = _rewrite(s, sWhat, eAct[0], m, bUppercase)
bChange = True
if bDebug:
echo(u"~ " + s + " -- " + m.group(eAct[0]) + " # " + sRuleId)
elif cActionType == "=":
# disambiguation
_GLOBALS[sWhat](s, m, dDA)
if bDebug:
echo(u"= " + m.group(0) + " # " + sRuleId + "\nDA: " + str(dDA))
else:
echo("# error: unknown action at " + sRuleId)
except Exception as e:
raise Exception(str(e), sRuleId)
if bChange:
return (s, aErrs)
return (False, aErrs)
def _createWriterError (s, sRepl, nOffset, m, iGroup, sId, bUppercase, sMsg, sURL, bIdRule, sOption):
"error for Writer (LO/OO)"
xErr = SingleProofreadingError()
#xErr = uno.createUnoStruct( "com.sun.star.linguistic2.SingleProofreadingError" )
xErr.nErrorStart = nOffset + m.start(iGroup)
xErr.nErrorLength = m.end(iGroup) - m.start(iGroup)
xErr.nErrorType = PROOFREADING
xErr.aRuleIdentifier = sId
# suggestions
if sRepl[0:1] == "=":
sugg = _GLOBALS[sRepl[1:]](s, m)
if sugg:
if bUppercase and m.group(iGroup)[0:1].isupper():
xErr.aSuggestions = tuple(map(str.capitalize, sugg.split("|")))
else:
xErr.aSuggestions = tuple(sugg.split("|"))
else:
xErr.aSuggestions = ()
elif sRepl == "_":
xErr.aSuggestions = ()
else:
if bUppercase and m.group(iGroup)[0:1].isupper():
xErr.aSuggestions = tuple(map(str.capitalize, m.expand(sRepl).split("|")))
else:
xErr.aSuggestions = tuple(m.expand(sRepl).split("|"))
# Message
if sMsg[0:1] == "=":
sMessage = _GLOBALS[sMsg[1:]](s, m)
else:
sMessage = m.expand(sMsg)
xErr.aShortComment = sMessage # sMessage.split("|")[0] # in context menu
xErr.aFullComment = sMessage # sMessage.split("|")[-1] # in dialog
if bIdRule:
xErr.aShortComment += " # " + sId
# URL
if sURL:
p = PropertyValue()
p.Name = "FullCommentURL"
p.Value = sURL
xErr.aProperties = (p,)
else:
xErr.aProperties = ()
return xErr
def _createDictError (s, sRepl, nOffset, m, iGroup, sId, bUppercase, sMsg, sURL, bIdRule, sOption):
"error as a dictionary"
dErr = {}
dErr["nStart"] = nOffset + m.start(iGroup)
dErr["nEnd"] = nOffset + m.end(iGroup)
dErr["sRuleId"] = sId
dErr["sType"] = sOption if sOption else "notype"
# suggestions
if sRepl[0:1] == "=":
sugg = _GLOBALS[sRepl[1:]](s, m)
if sugg:
if bUppercase and m.group(iGroup)[0:1].isupper():
dErr["aSuggestions"] = list(map(str.capitalize, sugg.split("|")))
else:
dErr["aSuggestions"] = sugg.split("|")
else:
dErr["aSuggestions"] = ()
elif sRepl == "_":
dErr["aSuggestions"] = ()
else:
if bUppercase and m.group(iGroup)[0:1].isupper():
dErr["aSuggestions"] = list(map(str.capitalize, m.expand(sRepl).split("|")))
else:
dErr["aSuggestions"] = m.expand(sRepl).split("|")
# Message
if sMsg[0:1] == "=":
sMessage = _GLOBALS[sMsg[1:]](s, m)
else:
sMessage = m.expand(sMsg)
dErr["sMessage"] = sMessage
if bIdRule:
dErr["sMessage"] += " # " + sId
# URL
dErr["URL"] = sURL if sURL else ""
return dErr
def _rewrite (s, sRepl, iGroup, m, bUppercase):
"text processor: write sRepl in s at iGroup position"
ln = m.end(iGroup) - m.start(iGroup)
if sRepl == "*":
sNew = " " * ln
elif sRepl == ">" or sRepl == "_" or sRepl == u"~":
sNew = sRepl + " " * (ln-1)
elif sRepl == "@":
sNew = "@" * ln
elif sRepl[0:1] == "=":
if sRepl[1:2] != "@":
sNew = _GLOBALS[sRepl[1:]](s, m)
sNew = sNew + " " * (ln-len(sNew))
else:
sNew = _GLOBALS[sRepl[2:]](s, m)
sNew = sNew + "@" * (ln-len(sNew))
if bUppercase and m.group(iGroup)[0:1].isupper():
sNew = sNew.capitalize()
else:
sNew = m.expand(sRepl)
sNew = sNew + " " * (ln-len(sNew))
return s[0:m.start(iGroup)] + sNew + s[m.end(iGroup):]
def ignoreRule (sId):
_aIgnoredRules.add(sId)
def resetIgnoreRules ():
_aIgnoredRules.clear()
#### init
try:
# LibreOffice / OpenOffice
from com.sun.star.linguistic2 import SingleProofreadingError
from com.sun.star.text.TextMarkupType import PROOFREADING
from com.sun.star.beans import PropertyValue
#import lightproof_handler_${implname} as opt
_createError = _createWriterError
except ImportError:
_createError = _createDictError
def load ():
global _oDict
try:
_oDict = IBDAWG("${binary_dic}")
except:
traceback.print_exc()
def setOptions (dOpt):
_dOptions.update(dOpt)
def getOptions ():
return _dOptions
def getOptionsLabels (sLang):
return gc_options.getUI(sLang)
def resetOptions ():
global _dOptions
_dOptions = dict(gc_options.dOpt)
def getDictionary ():
return _oDict
def _getRules (bParagraph):
try:
if not bParagraph:
return _rules.lSentenceRules
return _rules.lParagraphRules
except:
_loadRules()
if not bParagraph:
return _rules.lSentenceRules
return _rules.lParagraphRules
def _loadRules2 ():
from itertools import chain
from . import gc_rules
global _rules
_rules = gc_rules
# compile rules regex
for rule in chain(_rules.lParagraphRules, _rules.lSentenceRules):
try:
rule[1] = re.compile(rule[1])
except:
echo("Bad regular expression in # " + str(rule[3]))
rule[1] = "(?i)<Grammalecte>"
def _loadRules ():
from itertools import chain
from . import gc_rules
global _rules
_rules = gc_rules
# compile rules regex
for rulegroup in chain(_rules.lParagraphRules, _rules.lSentenceRules):
for rule in rulegroup[1]:
try:
rule[0] = re.compile(rule[0])
except:
echo("Bad regular expression in # " + str(rule[2]))
rule[0] = "(?i)<Grammalecte>"
def _getPath ():
return os.path.join(os.path.dirname(sys.modules[__name__].__file__), __name__ + ".py")
#### common functions
def option (sOpt):
"return True if option sOpt is active"
return _dOptions.get(sOpt, False)
def displayInfo (dDA, tWord):
"for debugging: retrieve info of word"
if not tWord:
echo("> nothing to find")
return True
if tWord[1] not in _dAnalyses and not _storeMorphFromFSA(tWord[1]):
echo("> not in FSA")
return True
if tWord[0] in dDA:
echo("DA: " + str(dDA[tWord[0]]))
echo("FSA: " + str(_dAnalyses[tWord[1]]))
return True
def _storeMorphFromFSA (sWord):
"retrieves morphologies list from _oDict -> _dAnalyses"
global _dAnalyses
_dAnalyses[sWord] = _oDict.getMorph(sWord)
return True if _dAnalyses[sWord] else False
def morph (dDA, tWord, sPattern, bStrict=True, bNoWord=False):
"analyse a tuple (position, word), return True if sPattern in morphologies (disambiguation on)"
if not tWord:
return bNoWord
if tWord[1] not in _dAnalyses and not _storeMorphFromFSA(tWord[1]):
return False
lMorph = dDA[tWord[0]] if tWord[0] in dDA else _dAnalyses[tWord[1]]
if not lMorph:
return False
p = re.compile(sPattern)
if bStrict:
return all(p.search(s) for s in lMorph)
return any(p.search(s) for s in lMorph)
def morphex (dDA, tWord, sPattern, sNegPattern, bNoWord=False):
"analyse a tuple (position, word), returns True if not sNegPattern in word morphologies and sPattern in word morphologies (disambiguation on)"
if not tWord:
return bNoWord
if tWord[1] not in _dAnalyses and not _storeMorphFromFSA(tWord[1]):
return False
lMorph = dDA[tWord[0]] if tWord[0] in dDA else _dAnalyses[tWord[1]]
# check negative condition
np = re.compile(sNegPattern)
if any(np.search(s) for s in lMorph):
return False
# search sPattern
p = re.compile(sPattern)
return any(p.search(s) for s in lMorph)
def analyse (sWord, sPattern, bStrict=True):
"analyse a word, return True if sPattern in morphologies (disambiguation off)"
if sWord not in _dAnalyses and not _storeMorphFromFSA(sWord):
return False
if not _dAnalyses[sWord]:
return False
p = re.compile(sPattern)
if bStrict:
return all(p.search(s) for s in _dAnalyses[sWord])
return any(p.search(s) for s in _dAnalyses[sWord])
def analysex (sWord, sPattern, sNegPattern):
"analyse a word, returns True if not sNegPattern in word morphologies and sPattern in word morphologies (disambiguation off)"
if sWord not in _dAnalyses and not _storeMorphFromFSA(sWord):
return False
# check negative condition
np = re.compile(sNegPattern)
if any(np.search(s) for s in _dAnalyses[sWord]):
return False
# search sPattern
p = re.compile(sPattern)
return any(p.search(s) for s in _dAnalyses[sWord])
def stem (sWord):
"returns a list of sWord's stems"
if not sWord:
return []
if sWord not in _dAnalyses and not _storeMorphFromFSA(sWord):
return []
return [ s[1:s.find(" ")] for s in _dAnalyses[sWord] ]
## functions to get text outside pattern scope
# warning: check compile_rules.py to understand how it works
def nextword (s, iStart, n):
"get the nth word of the input string or empty string"
m = re.match(u"( +[\\w%-]+){" + str(n-1) + u"} +([\\w%-]+)", s[iStart:])
if not m:
return None
return (iStart+m.start(2), m.group(2))
def prevword (s, iEnd, n):
"get the (-)nth word of the input string or empty string"
m = re.search(u"([\\w%-]+) +([\\w%-]+ +){" + str(n-1) + u"}$", s[:iEnd])
if not m:
return None
return (m.start(1), m.group(1))
def nextword1 (s, iStart):
"get next word (optimization)"
m = _zNextWord.match(s[iStart:])
if not m:
return None
return (iStart+m.start(1), m.group(1))
def prevword1 (s, iEnd):
"get previous word (optimization)"<|fim▁hole|> return None
return (m.start(1), m.group(1))
def look (s, sPattern, sNegPattern=None):
"seek sPattern in s (before/after/fulltext), if sNegPattern not in s"
if sNegPattern and re.search(sNegPattern, s):
return False
if re.search(sPattern, s):
return True
return False
def look_chk1 (dDA, s, nOffset, sPattern, sPatternGroup1, sNegPatternGroup1=None):
"returns True if s has pattern sPattern and m.group(1) has pattern sPatternGroup1"
m = re.search(sPattern, s)
if not m:
return False
try:
sWord = m.group(1)
nPos = m.start(1) + nOffset
except:
#print("Missing group 1")
return False
if sNegPatternGroup1:
return morphex(dDA, (nPos, sWord), sPatternGroup1, sNegPatternGroup1)
return morph(dDA, (nPos, sWord), sPatternGroup1, False)
#### Disambiguator
def select (dDA, nPos, sWord, sPattern, lDefault=None):
if not sWord:
return True
if nPos in dDA:
return True
if sWord not in _dAnalyses and not _storeMorphFromFSA(sWord):
return True
if len(_dAnalyses[sWord]) == 1:
return True
lSelect = [ sMorph for sMorph in _dAnalyses[sWord] if re.search(sPattern, sMorph) ]
if lSelect:
if len(lSelect) != len(_dAnalyses[sWord]):
dDA[nPos] = lSelect
#echo("= "+sWord+" "+str(dDA.get(nPos, "null")))
elif lDefault:
dDA[nPos] = lDefault
#echo("= "+sWord+" "+str(dDA.get(nPos, "null")))
return True
def exclude (dDA, nPos, sWord, sPattern, lDefault=None):
if not sWord:
return True
if nPos in dDA:
return True
if sWord not in _dAnalyses and not _storeMorphFromFSA(sWord):
return True
if len(_dAnalyses[sWord]) == 1:
return True
lSelect = [ sMorph for sMorph in _dAnalyses[sWord] if not re.search(sPattern, sMorph) ]
if lSelect:
if len(lSelect) != len(_dAnalyses[sWord]):
dDA[nPos] = lSelect
#echo("= "+sWord+" "+str(dDA.get(nPos, "null")))
elif lDefault:
dDA[nPos] = lDefault
#echo("= "+sWord+" "+str(dDA.get(nPos, "null")))
return True
def define (dDA, nPos, lMorph):
dDA[nPos] = lMorph
#echo("= "+str(nPos)+" "+str(dDA[nPos]))
return True
#### GRAMMAR CHECKER PLUGINS
${plugins}
${generated}<|fim▁end|> | m = _zPrevWord.search(s[:iEnd])
if not m: |
<|file_name|>42. Trapping rain water.py<|end_file_name|><|fim▁begin|>class Solution(object):
def trap(self, height):
"""
:type height: List[int]
:rtype: int
"""
l=len(height)
maxheight=[0 for i in range(l)]
leftmax=0
rightmax=0
res=0
for i in range(l):
if height[i]>leftmax:
leftmax=height[i]
maxheight[i]=leftmax<|fim▁hole|> for i in reversed(range(l)):
if height[i]>rightmax:
rightmax=height[i]
if min(rightmax,maxheight[i])-height[i]>0:
res+=min(rightmax,maxheight[i])-height[i]
return res<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|> 'start': start,
'finish': finish,
'switch': switch,
# 'publish': publish,
}<|fim▁end|> | from gitbarry.reasons import start, finish, switch # , switch, publish
REASONS = { |
<|file_name|>tc.gam.widget-visibility-handler.js<|end_file_name|><|fim▁begin|>/*--------------------------------------------------------------------
Copyright (c) 2011 Local Projects. All rights reserved.
Licensed under the Affero GNU GPL v3, see LICENSE for more details.
--------------------------------------------------------------------*/
tc.gam.widgetVisibilityHandler = function(options) {
var self = {
currentHash: window.location.hash,
previousHash: null
};
self._setHash = function(hash) {
if (hash === self.currentHash) {
tc.jQ(window).trigger('hashchange');
} else {
//This will trigger the 'hashchange' event because the hash is different
window.location.hash = hash;
}
};
self._getHash = function() {
return window.location.hash.substring(1, window.location.hash.length);
};
self._goHome = function() {<|fim▁hole|>
self._triggerWidgetVisibilityEvent = function(action, widget, id) {
tc.jQ(tc).trigger(action + '-project-widget', [widget, id]);
};
self._onHashChange = function(event) {
var action, widget;
self.previousHash = self.currentHash;
self.currentHash = self._getHash();
// For project-home hash, fire goHome.
if (!self.currentHash || self.currentHash === 'project-home') {
self._goHome();
} else {
action = self.currentHash.split(',')[0];
widget = self.currentHash.split(',')[1];
id = self.currentHash.split(',')[2];
}
tc.util.log('&&& hashchange: ' + action + ', ' + widget);
self._triggerWidgetVisibilityEvent(action, widget, id);
};
var bindEvents = function() {
tc.jQ(window).bind('hashchange', self._onHashChange);
};
var init = function() {
bindEvents();
if (self.currentHash) {
self._setHash(self.currentHash);
} else {
self._goHome();
}
};
init();
return self;
};<|fim▁end|> | self._setHash('show,home');
}; |
<|file_name|>connection.rs<|end_file_name|><|fim▁begin|>use libc;
use std::{mem, net, ptr};
use std::io::{Error, Result};
use std::cell::RefCell;
use std::ops::Deref;
use Event;
use socket;
pub struct Connection {
pub client_fd: i32,
pub client_addr: net::SocketAddr,<|fim▁hole|>
pub read_size: usize,
pub write_size: usize,
pub read_buf: Vec<u8>,
pub write_buf: Vec<u8>,
}
const WEB_WELCOME: &str = "HTTP/1.1 200 OK
Date: Tue, 31 Oct 2017 13:40:35 GMT
Content-Type: text/html; charset=utf-8
<h1>Welcome to Nest!</h1>";
impl Connection {
pub fn new() -> Connection {
Connection {
client_fd: -1,
client_addr: unsafe { mem::uninitialized() },
client_addr_len: 0,
accept_total: 0,
read_size: 0,
write_size: 0,
read_buf: Vec::new(),
write_buf: WEB_WELCOME.as_bytes().to_vec(),
}
}
pub fn event_accept(e: &mut Event, ev: &libc::kevent) {
let mut count = ev.data;
println!("DEBUG: Have {:?} need accept", count);
for index in 0..count {
let mut client_addr: libc::sockaddr = unsafe { mem::uninitialized() };
let mut client_len = mem::size_of::<libc::sockaddr>() as libc::socklen_t;
let client_fd = unsafe {
libc::accept(
e.local_sock_fd,
&mut client_addr as *mut libc::sockaddr,
&mut client_len as *mut libc::socklen_t
)
};
if client_fd == -1 {
let err = Error::last_os_error();
let errno = err.raw_os_error();
println!("DEBUG: Accept failed. Error: {:?}", err);
match errno.unwrap() {
libc::EINTR => {
// The accept() operation was interrupted.
// need retry
println!("ERROR: accept() interrupted. Index: {:?}", index);
return;
},
libc::EMFILE | libc::ENFILE => {
// The per-process descriptor table is full.
// The system file table is full.
// set_event(eq, eq.listen, libc::EVFILT_READ, libc::EV_DELETE|libc::EV_DISABLE, ptr::null_mut());
//let listen_fd = c.queue.local_sock_fd;
//c.queue.set(listen_fd, libc::EVFILT_READ, libc::EV_ADD|libc::EV_DISABLE, ptr::null_mut());
println!("ERROR: File table is full. Delete with disable read event. Index: {:?}", index);
return;
},
libc::EAGAIN => {
// EWOULDBLOCK = EAGAIN in libc
// The socket is marked non-blocking and no connections are present to be accepted.
println!("ERROR: accept() not ready. Index: {:?}", index);
return;
},
libc::ECONNABORTED => {
// A connection arrived, but it was closed while waiting on the listen queue.
println!("ERROR: Client closed. Index: {:?}", index);
return;
},
_ => println!("DEBUG: Not match errno. Index: {:?}", index),
}
return;
}
socket::nonblocking(client_fd);
let c = Box::new(Connection {
client_fd: client_fd,
client_addr: socket::to_std_socket_addr(&client_addr),
client_addr_len: client_len,
accept_total: index as usize,
read_size: 0,
write_size: 0,
read_buf: Vec::new(),
write_buf: "Welcome >>>>>>>>>>>>>>> to server".as_bytes().to_vec(),
});
println!("DEBUG: Accept. IP: {:?}, Index: {:?}, fd: {:?}", c.client_addr, c.accept_total, client_fd);
//let raw_ptr_c: *mut _ = &mut *c;
let raw_ptr_c = Box::into_raw(c);
e.set(client_fd, libc::EVFILT_READ, libc::EV_ADD|libc::EV_ENABLE, raw_ptr_c as *mut libc::c_void);
count -= 1;
}
}
pub fn event_read(e: &mut Event, ev: &libc::kevent) {
let mut c = unsafe { Box::from_raw(ev.udata as *mut Connection) };
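        // Reclaim ownership of the Connection from the kevent udata pointer;
        // on any early return below the Box drops and `Drop` closes the fd.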
let fd = ev.ident as i32;
let mut readable_len = ev.data;
println!("DEBUG: read event. readability count: {:?}, fd: {:?}", readable_len, fd);
if readable_len == 0 {
return;
}
while readable_len > 0 {
let mut buf: [u8; super::MAX_BUFFER] = unsafe { mem::uninitialized() };
let recv_len = unsafe { libc::recv(fd, buf.as_mut_ptr() as *mut libc::c_void, super::MAX_BUFFER, 0) };
if recv_len == 0 {
// client closed.
let err = Error::last_os_error();
println!("DEBUG: recv() failed. client closed. error: {:?}", err);
socket::close(fd);
return;
}
else if recv_len < 0 { // recv_len == -1
let err = Error::last_os_error();
let errno = err.raw_os_error();
println!("DEBUG: recv() faild. error: {:?}", err);
match errno.unwrap() {
libc::EAGAIN | libc::EINTR=> {
/*
The socket is marked non-blocking and the receive
operation would block, or a receive timeout had been
set and the timeout expired before data were received.
EINTR: The receive was interrupted by delivery of a signal before any data were available.
try again
*/
println!("DEBUG: recv() not ready. error: {:?}", err);
continue;
},
libc::ECONNRESET => {
/*
ECONNRESET: The remote socket end is forcibly closed.
*/
println!("DEBUG: recv() failed. remote socket closed. error: {:?}", err);
socket::close(fd);
return;
},
_ => {
println!("DEBUG: recv() failed. not match error.");
socket::close(fd);
return;
}
}
}
readable_len -= recv_len;
c.read_size += recv_len as usize;
c.read_buf.extend_from_slice(&buf[0..recv_len as usize]);
let str_buf = &buf[0..recv_len as usize];
println!("DEBUG: read count: {:?}", recv_len);
println!("DEBUG: read context: {:?}", String::from_utf8_lossy(str_buf));
if readable_len <= 0 || recv_len < super::MAX_BUFFER as isize {
// read completed
println!("DEBUG: read completed. len: {:?}, full data: \n{:?}", c.read_size, String::from_utf8_lossy(c.read_buf.as_slice()));
let raw_ptr_c = Box::into_raw(c);
e.set(fd, libc::EVFILT_WRITE, libc::EV_ADD|libc::EV_ENABLE, raw_ptr_c as *mut libc::c_void);
return;
}
}
}
pub fn event_write(ev: &libc::kevent) {
let mut c = unsafe { Box::from_raw(ev.udata as *mut Connection) };
let fd = ev.ident as i32;
let writable_len = ev.data;
let buf = WEB_WELCOME.as_bytes();
println!("DEBUG: wite event. writable count: {:?}, fd: {:?}", writable_len, fd);
while c.write_size < buf.len() {
let mut should_len = writable_len as usize;
let last_len = buf.len() - c.write_size;
if should_len >= last_len {
should_len = last_len;
}
let n = unsafe {
                libc::send(fd, &buf[c.write_size..c.write_size + should_len] as *const _ as *const libc::c_void, should_len, 0)
};
if n == 0 {
// send() returned zero.
let err = Error::last_os_error();
println!("ERROR: send() returned zero. Error: {:?}", err);
continue;
}
else if n < 0 {
let err = Error::last_os_error();
let errno = err.raw_os_error();
match errno.unwrap() {
libc::EAGAIN => {
// The socket is marked non-blocking and the requested operation would block.
// try again
//c.queue.set(fd, libc::EVFILT_WRITE, libc::EV_ADD|libc::EV_ENABLE, ptr::null_mut());
println!("ERROR: send() not ready. Error: {:?}", err);
continue;
},
libc::ENOBUFS => {
/*
ENOBUFS: The system was unable to allocate an internal buffer.
The operation may succeed when buffers become avail-able.
ENOBUFS: The output queue for a network interface was full.
This generally indicates that the interface has
stopped sending, but may be caused by transient con-gestion.
try again.
*/
//c.queue.set(fd, libc::EVFILT_WRITE, libc::EV_ADD|libc::EV_ENABLE, ptr::null_mut());
println!("ERROR: send() not ready. Error: {:?}", err);
socket::close(fd);
return;
},
libc::EHOSTUNREACH | libc::EHOSTDOWN | libc::ENETDOWN => {
/*
EHOSTUNREACH: The remote host was unreachable.
EHOSTDOWN: The remote host was down.
ENETDOWN: The remote network was down.
*/
println!("ERROR: send() remote closed. Error: {:?}", err);
socket::close(fd);
return;
},
libc::EISCONN => {
// A destination address was specified and the socket is already connected.
                    // the fd gets closed below, so bail out instead of
                    // retrying send() on a closed descriptor.
                    //c.queue.set(fd, libc::EVFILT_WRITE, libc::EV_ADD|libc::EV_ENABLE, ptr::null_mut());
                    println!("ERROR: send() on an already-connected socket. Error: {:?}", err);
                    socket::close(fd);
                    return;
},
libc::ECONNREFUSED => {
/*
The socket received an ICMP destination unreachable
message from the last message sent. This typically
means that the receiver is not listening on the remote
port.
*/
println!("ERROR: send() can not to target. Error: {:?}", err);
socket::close(fd);
return;
},
libc::EPIPE => {
/*
The socket is unable to send anymore data (SBS_CANTSENDMORE has been set on the socket).
This typically means that the socket is not connected.
{ code: 32, message: "Broken pipe" }
*/
println!("ERROR: send() -> EPIPE, the socket is not connected. Error: {:?}", err);
socket::close(fd);
return;
}
_ => {
println!("ERROR: send() failed not match. Error: {:?}", err);
socket::close(fd);
return;
},
}
}
c.write_size += n as usize;
println!("DEBUG: write count: {:?}", n);
println!("DEBUG: write context: {:?}", String::from_utf8_lossy(&buf[c.write_size..should_len]));
if c.write_size >= c.write_buf.len() {
socket::close(fd);
return;
}
}
}
}
impl Drop for Connection {
fn drop(&mut self) {
println!("DEBUG: Connection drop. fd: {:?}", self.client_fd);
let _ = unsafe { libc::close(self.client_fd) };
}
}<|fim▁end|> | pub client_addr_len: u32,
pub accept_total: usize, |
<|file_name|>neighs_info.py<|end_file_name|><|fim▁begin|>"""
neighs information
------------------
Auxiliary class to manage the information of the neighbourhood
returned by the retrievers.
Due to the complexity of the structure it is convenient to put it all together
in a single class and manage, in a centralized way, all the different
interactions with neighs_info in the whole package.
possible inputs
---------------
* integer {neighs}
* list of integers {neighs}
* list of lists of integers {neighs for some iss}
* list of lists of lists of integers {neighs for some iss and ks}
* numpy array 1d, 2d, 3d {neighs}
* tuple of neighs
standard storing
----------------
- neighs:
- array 3d (ks, iss, neighs)
- lists [ks][iss][neighs]
- list arrays: [ks](iss, neighs), [ks][iss](neighs)
- sp_relative_pos:
- array 3d (ks, iss, neighs)
- lists [ks][iss][neighs]
- list arrays [ks](iss, neighs), [ks][iss](neighs)
standard output
---------------
- neighs:
- array 3d (ks, iss, neighs)
- lists [ks][iss][neighs]
- list arrays: [ks](iss, neighs), [ks][iss](neighs)
Parameters
----------
staticneighs: all the ks have the same information. They are static.
It is useful information for the getters. The information is stored with
deep=2.
staticneighs_set: same information as staticneighs, but tracking how the data
was actually set: if True it is stored with deep=2, if False with deep=3.
constant_neighs: all the iss have the same number of neighs for all ks.
level: the format level expected. First one is only neighs, second one has
different iss and the third one different ks.
_kret: maximum number of perturbations of the system. It could be useful for
open systems expressed in a staticneighs way to find errors or delimit
output.
n: maximum number of id of elements retrieved.
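usage sketch
------------
Illustrative only: the accepted shapes depend on the format parameters above,
and the calls below are a minimal, hypothetical subset of the API.
    import numpy as np
    nei = Neighs_Info(format_level=3, type_neighs='array', staticneighs=False)
    neighs = np.array([[[0, 1], [2, 3]]])   # (ks, iss, neighs) = (1, 2, 2)
    nei.direct_set(neighs)                  # set neighs bypassing the setters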
"""
import numpy as np
from copy import deepcopy
import warnings
warnings.filterwarnings("always")
from auxiliar_joinning_neighs import join_neighsinfo_AND_static_dist,\
join_neighsinfo_OR_static_dist, join_neighsinfo_XOR_static_dist,\
join_neighsinfo_AND_static_notdist, join_neighsinfo_OR_static_notdist,\
join_neighsinfo_XOR_static_notdist, join_neighsinfo_AND_notstatic_dist,\
join_neighsinfo_OR_notstatic_dist, join_neighsinfo_XOR_notstatic_dist,\
join_neighsinfo_AND_notstatic_notdist,\
join_neighsinfo_OR_notstatic_notdist, join_neighsinfo_XOR_notstatic_notdist
pos_structure = [None, 'raw', 'tuple', 'tuple_only', 'tuple_tuple',
'list_tuple_only', 'tuple_list_tuple']
pos_levels = [None, 0, 1, 2, 3]
pos_format_set_iss = [None, "general", "null", "int", "list"]
pos_types_neighs = [None, "general", "list", "array", "slice"]
pos_types_rel_pos = [None, "general", "list", "array"]
inttypes = [int, np.int32, np.int64]
class Neighs_Info:
"""Class to store, move and manage the neighbourhood information retrieved.
"""
type_ = "pySpatialTools.Neighs_Info"
def __init__(self, constant_neighs=False, kret=1, format_structure=None,
n=0, format_get_info=None, format_get_k_info=None,
format_set_iss=None, staticneighs=None, ifdistance=None,
type_neighs=None, type_sp_rel_pos=None, format_level=None):
"""The instanciation of the container object for all the neighbourhood
information.
Parameters
----------
constant_neighs: boolean (default=False)
            if there is always the same number of neighs across all the
            possible elements and perturbations.
kret: int (default=1)
the total perturbations applied (maximum k size).
format_structure: str, optional (default=None)
the type of structure in which we are going to set the
neighbourhood information.
n: int (default=0)
the maximum number of possible neighs code.
format_get_info: str optional (default=None)
in which format the information is returned to the user.
format_get_k_info: str optional (default=None)
            the format in which the ks information is returned.
format_set_iss: str optional (default=None)
            the format in which the elements iss are set.
staticneighs: boolean (default=None)
if there is constant neighbourhood across the perturbations.
ifdistance: boolean (default=None)
if we set the distance or the relative position information.
type_neighs: str optional (default=None)
the type of object describing the neighs of the neighbourhood.
type_sp_rel_pos: str optional (default=None)
the type of object describing the relative position of the
neighbourhood.
format_level: int (default=None)
the level in which the information of the neighborhood will be set.
"""
## Initialize class
self._set_init()
## Extra info
self._constant_neighs = constant_neighs
# Constrain information
self._kret = kret
self._n = n
# Setting and formatting information
self.format_set_info = format_structure, type_neighs, type_sp_rel_pos,\
format_set_iss
self.format_get_info = format_get_info, format_get_k_info
## Formatters
# Global information
self._format_globalpars(staticneighs, ifdistance, format_level)
# Format setters
self._format_setters(format_structure, type_neighs,
type_sp_rel_pos, format_set_iss)
# Format getters
self._format_getters(format_get_info, format_get_k_info)
# Format joining
self._format_joining_functions()
def __iter__(self):
"""Get information sequentially.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i`.
sp_relpos: list or np.ndarray
the relative position information for each element `i`.
ks: list or np.ndarray
the perturbations indices associated with the returned information.
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
"""
for i in range(len(self.ks)):
yield self.get_neighs([i]), self.get_sp_rel_pos([i]),\
[self.ks[i]], self.iss
def empty(self):
"""If it is empty."""
return not self.any()
def any(self):
"""If it is not empty."""
boolean = True
if type(self.idxs) == np.ndarray:
boolean = all(self.idxs.shape)
elif type(self.idxs) == list:
sh = np.array(self.idxs).shape
if len(sh) >= 2:
boolean = np.all(sh)
return boolean
def reset(self):
"""Reset all the class to empty all the neighbourhood information."""
self._set_init()
def copy(self):
"""Deep copy of the container."""
return deepcopy(self)
@property
def shape(self):
"""Return the number of indices, neighbours and ks considered. For
irregular cases the neighbours number is set as None.
Returns
-------
sh0: int
the number of elements we want to get their neighbourhood.
sh1: int
            the number of neighs each element has, if it is constant.
sh2: int
the number of perturbations applied.
"""
if not self._setted:
return None, None, None
if type(self.idxs) == slice:
sh0 = len(self.iss)
step = self.idxs.step
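            # slice length = ceil((stop - start) / step)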
            sh1 = (self.idxs.stop + step - 1 - self.idxs.start)/step
elif type(self.idxs) == np.ndarray:
sh0 = 0 if self.idxs is None else len(self.idxs)
sh1 = 0 if self.idxs is None else self.idxs.shape[1]
elif type(self.idxs) == list:
sh0 = len(self.idxs)
sh1 = len(self.idxs[0])
sh2 = len(self.ks) if self.ks is not None else None
return sh0, sh1, sh2
###########################################################################
############################ GENERAL SETTINGS #############################
###########################################################################
def set_information(self, k_perturb=0, n=0):
"""Set specific global information.
Parameters
----------
        k_perturb: int (default=0)
the total perturbations applied (maximum k size).
n: int (default=0)
the maximum number of possible neighs code.
"""
self._n = n
self._kret = k_perturb
def _set_ks_static(self, ks):
"""External set ks for staticneighs.
Parameters
----------
ks: list or np.ndarray
the perturbations indices associated with the stored information.
"""
self.ks = ks
if np.max(self.ks) > self._kret:
self._kret = np.max(self.ks)
def _set_ks_dynamic(self, ks):
"""External set ks for non-staticneighs.
Parameters
----------
ks: list or np.ndarray
the perturbations indices associated with the stored information.
"""
assert(len(ks) == len(self.idxs))
self.ks = ks
if np.max(self.ks) > self._kret:
self._kret = np.max(self.ks)
def direct_set(self, neighs, sp_relative_pos=None):
"""Direct set of neighs_info.
Parameters
----------
neighs: list or np.ndarray
the neighs information for each element `i` and for each
perturbation `k`.
        sp_relative_pos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
"""
self.idxs = neighs
self.sp_relative_pos = sp_relative_pos
self.assert_goodness()
def reset_functions(self):
"""Reset the function regarding the parameters set."""
if type(self.idxs) == list:
type_neighs = 'list'
elif type(self.idxs) == slice:
type_neighs = 'slice'
elif type(self.idxs) == np.ndarray:
type_neighs = 'array'
if type(self.sp_relative_pos) == list:
type_sp_rel_pos = 'list'
elif type(self.sp_relative_pos) == np.ndarray:
type_sp_rel_pos = 'array'
else:
type_sp_rel_pos = None
self.set_types(type_neighs, type_sp_rel_pos)
def reset_structure(self, format_structure):
"""Reset structure regarding the parameters set and the
`format_structure` input.
Parameters
----------
format_structure: str, optional
the type of structure in which we are going to set the
neighbourhood information.
"""
assert(format_structure in pos_structure)
_, aux1, aux2, aux3 = self.format_set_info
self.format_set_info = format_structure, aux1, aux2, aux3
self.reset_format()
def reset_level(self, format_level):
"""Reset level regarding the parameters set and the new input.
Parameters
----------
format_level: int
the level in which the information of the neighborhood will be set.
"""
assert(format_level in pos_levels)
self.level = format_level
self.reset_format()
def reset_format(self):
"""Reset format regarding the parameters set."""
## Formatters
self._format_setters(*self.format_set_info)
self._format_getters(*self.format_get_info)
self._format_joining_functions()
def set_types(self, type_neighs=None, type_sp_rel_pos=None):
"""Set type of objects in which the information will be given.
Parameters
----------
type_neighs: str optional (default=None)
the type of object describing the neighs of the neighbourhood.
type_sp_rel_pos: str optional (default=None)
the type of object describing the relative position of the
neighbourhood.
"""
## 1. Set set_sp_rel_pos
self.type_neighs, self.type_sp_rel_pos = type_neighs, type_sp_rel_pos
if self.ifdistance is False:
self.set_sp_rel_pos = self._null_set_rel_pos
self.get_sp_rel_pos = self._null_get_rel_pos
else:
self.get_sp_rel_pos = self._general_get_rel_pos
if self.level < 2:
self.get_sp_rel_pos = self._static_get_rel_pos
if type_sp_rel_pos is None or type_sp_rel_pos == 'general':
self.set_sp_rel_pos = self._general_set_rel_pos
elif type_sp_rel_pos == 'array':
if self.level is None:
self.set_sp_rel_pos = self._set_rel_pos_general_array
elif self.level == 0:
self.set_sp_rel_pos = self._set_rel_pos_dim
elif self.level == 1:
self.set_sp_rel_pos = self._array_only_set_rel_pos
elif self.level == 2:
self.set_sp_rel_pos = self._array_array_set_rel_pos
elif self.level == 3:
self.set_sp_rel_pos = self._array_array_array_set_rel_pos
elif type_sp_rel_pos == 'list':
if self.level is None:
self.set_sp_rel_pos = self._set_rel_pos_general_list
elif self.level == 0:
self.set_sp_rel_pos = self._set_rel_pos_dim
elif self.level == 1:
self.set_sp_rel_pos = self._list_only_set_rel_pos
elif self.level == 2:
self.set_sp_rel_pos = self._list_list_only_set_rel_pos
elif self.level == 3:
self.set_sp_rel_pos = self._list_list_set_rel_pos
## 2. Set set_neighs
if type_neighs is None or type_neighs == 'general':
self.set_neighs = self._general_set_neighs
elif type_neighs == 'array':
# Format get neighs
if self.staticneighs:
self.get_neighs = self._get_neighs_array_static
else:
self.get_neighs = self._get_neighs_array_dynamic
# Format set neighs
if self.level is None:
self.set_neighs = self._set_neighs_general_array
elif self.level == 0:
self.set_neighs = self._set_neighs_number
elif self.level == 1:
self.set_neighs = self._set_neighs_array_lvl1
elif self.level == 2:
self.set_neighs = self._set_neighs_array_lvl2
elif self.level == 3:
self.set_neighs = self._set_neighs_array_lvl3
elif type_neighs == 'list':
# Format get neighs
if self._constant_neighs:
if self.staticneighs:
self.get_neighs = self._get_neighs_array_static
else:
self.get_neighs = self._get_neighs_array_dynamic
else:
if self.staticneighs:
self.get_neighs = self._get_neighs_list_static
else:
self.get_neighs = self._get_neighs_list_dynamic
# Format set neighs
if self.level is None:
self.set_neighs = self._set_neighs_general_list
elif self.level == 0:
self.set_neighs = self._set_neighs_number
elif self.level == 1:
self.set_neighs = self._set_neighs_list_only
elif self.level == 2:
self.set_neighs = self._set_neighs_list_list
elif self.level == 3:
self.set_neighs = self._set_neighs_list_list_list
elif type_neighs == 'slice':
self.set_neighs = self._set_neighs_slice
self.get_neighs = self._get_neighs_slice
self.staticneighs_set = True
def set_structure(self, format_structure=None):
"""Set the structure in which the neighbourhood information will be
given.
Parameters
----------
format_structure: str, optional (default=None)
the type of structure in which we are going to set the
neighbourhood information.
"""
if format_structure is None:
self._set_info = self._set_general
elif format_structure == 'raw':
self._set_info = self._set_raw_structure
self.ifdistance = False
self.set_sp_rel_pos = self._null_set_rel_pos
self.get_sp_rel_pos = self._null_get_rel_pos
elif format_structure == 'tuple':
self._set_info = self._set_tuple_structure
self.set_sp_rel_pos = self._null_set_rel_pos
self.get_sp_rel_pos = self._null_get_rel_pos
elif format_structure == 'tuple_only':
self._set_info = self._set_tuple_only_structure
elif format_structure == 'tuple_k':
self._set_info = self._set_tuple_k_structure
elif format_structure == 'tuple_tuple':
self._set_info = self._set_tuple_tuple_structure
elif format_structure == 'list_tuple_only':
# assert(self.level == 2)
self._set_info = self._set_list_tuple_only_structure
self.staticneighs_set = False
if self.level != 2:
raise Exception("Not correct inputs.")
else:
self.level = 3
elif format_structure == 'tuple_list_tuple':
# assert(self.level == 2)
self._set_info = self._set_tuple_list_tuple_structure
self.staticneighs_set = False
if self.level != 2:
raise Exception("Not correct inputs.")
else:
self.level = 3
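# Example sketch (hypothetical instance `ninfo`): the chosen structure
# fixes how `set` parses its input, e.g.
#
#     ninfo.set_structure('tuple_k')
#     ninfo.set(([[0, 1], [2, 3]], [0]))   # parsed as (idxs, ks)
#
# whereas 'raw' expects bare neighs and disables distance information.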
###########################################################################
################################# FORMATS #################################
###########################################################################
############################### Formatters ################################
###########################################################################
def _format_globalpars(self, staticneighs, ifdistance, format_level):
"""Global information non-mutable and mutable in order to force or keep
other information and functions.
Parameters
----------
staticneighs: boolean
if there is constant neighbourhood across the perturbations.
ifdistance: boolean
if we set the distance or the relative position information.
format_level: int
the level in which the information of the neighborhood will be set.
"""
## Basic information about how neighs_info will be input
self.level = format_level
## Global known information about relative position
self.ifdistance = ifdistance
## Global known information about get information
self.staticneighs = staticneighs
## Setting changeable information about the static neighs setting
self.staticneighs_set = None
if self.level is None:
self.staticneighs_set = None
elif self.level <= 2:
self.staticneighs_set = True
elif self.level == 3:
self.staticneighs_set = False
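# Summary of the branches above: format_level None -> staticneighs_set
# None; levels 0-2 -> True; level 3 -> False, since only level-3 inputs
# carry an explicit perturbation (ks) axis.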
def _format_setters(self, format_structure, type_neighs=None,
type_sp_rel_pos=None, format_set_iss=None):
"""Format the setter functions.
Parameters
----------
format_structure: str, optional
the type of structure in which we are going to set the
neighbourhood information.
type_neighs: str optional (default=None)
the type of object describing the neighs of the neighbourhood.
type_sp_rel_pos: str optional (default=None)
the type of object describing the relative position of the
neighbourhood.
format_set_iss: str optional (default=None)
in which format of elements iss we set.
"""
## 1. Format structure
self.set_structure(format_structure)
## 2. Set types
self.set_types(type_neighs, type_sp_rel_pos)
## 3. Post-format
if self._constant_neighs:
self._main_postformat = self._cte_postformat
else:
self._main_postformat = self._null_postformat
self._iss_postformat = self._assert_iss_postformat
self._ks_postformat = self._assert_ks_postformat
if self._constant_neighs and type_neighs != 'slice':
self._idxs_postformat = self._idxs_postformat_array
else:
self._idxs_postformat = self._idxs_postformat_null
## 4. Format iss
self._format_set_iss(format_set_iss)
## 5. Format set ks
if self.staticneighs:
self.set_ks = self._set_ks_static
else:
self.set_ks = self._set_ks_dynamic
## 6. General set
self.set = self._general_set
def _format_set_iss(self, format_set_iss=None):
"""Format the setter iss function.
Parameters
----------
format_set_iss: str optional (default=None)
in which format of elements iss we set.
"""
## Format iss
if format_set_iss is None or format_set_iss == 'general':
self._set_iss = self._general_set_iss
elif format_set_iss == 'null':
self._set_iss = self._null_set_iss
elif format_set_iss == 'int':
self._set_iss = self._int_set_iss
elif format_set_iss == 'list':
self._set_iss = self._list_set_iss
def _format_getters(self, format_get_info=None, format_get_k_info=None):
"""Function to program this class according to the stored idxs.
Parameters
----------
format_get_info: str optional (default=None)
in which format the information is returned to the user.
format_get_k_info: str optional (default=None)
in which format of the ks we set.
"""
## Get info setting
if format_get_k_info is None:
self.get_k = self._general_get_k
elif format_get_k_info == "default":
self.get_k = self._default_get_k
elif format_get_k_info == "general":
self.get_k = self._general_get_k
elif format_get_k_info == "list":
self.get_k = self._list_get_k
elif format_get_k_info == "integer":
self.get_k = self._integer_get_k
## Get information setting
if format_get_info is None:
self.get_information = self._general_get_information
elif format_get_info == "default":
self.get_information = self._default_get_information
elif format_get_info == "general":
self.get_information = self._general_get_information
## Other getters
if self.staticneighs:
self.get_copy_iss = self._staticneighs_get_copy_iss
self.get_copy_iss_by_ind = self._staticneighs_get_copy_iss_by_ind
else:
self.get_copy_iss = self._notstaticneighs_get_copy_iss
self.get_copy_iss_by_ind =\
self._notstaticneighs_get_copy_iss_by_ind
def _postformat(self):
"""Format properly."""
self._main_postformat()
self._iss_postformat()
self._assert_ks_postformat()
self._idxs_postformat()
def _cte_postformat(self):
"""To array because of constant neighs."""
# if type(self.idxs) == list:
# self.idxs = np.array(self.idxs)
if self.sp_relative_pos is not None:
if type(self.sp_relative_pos) == list:
self.sp_relative_pos = np.array(self.sp_relative_pos)
def _assert_iss_postformat(self):
"""Assert if the iss is correctly formatted, if not, format properly.
"""
if type(self.idxs) in [list, np.ndarray]:
# print self.idxs, self.iss, self.set_neighs
if self.staticneighs:
### WARNING: Redefinition of iss.
if len(self.idxs) != len(self.iss):
if len(self.idxs[0]) == len(self.iss):
self.idxs = self.idxs[0]
else:
self.iss = range(len(self.idxs))
else:
assert(all([len(k) == len(self.idxs[0]) for k in self.idxs]))
def _assert_ks_postformat(self):
"""Assert proper postformatting for the ks."""
if type(self.idxs) in [list, np.ndarray]:
if self.ks is None:
if self.staticneighs:
pass
else:
self.ks = range(len(self.idxs))
if self.staticneighs:
pass
else:
# print self.ks, self.idxs, self.set_neighs, self.set_sp_rel_pos
assert(len(self.ks) == len(self.idxs))
## Defining functions
if self.sp_relative_pos is not None and self.staticneighs:
self.get_sp_rel_pos = self._static_get_rel_pos
elif not self.staticneighs:
if type(self.sp_relative_pos) == list:
self.get_sp_rel_pos = self._dynamic_rel_pos_list
else:
self.get_sp_rel_pos = self._dynamic_rel_pos_array
if self.sp_relative_pos is None:
self.set_sp_rel_pos = self._null_set_rel_pos
self.get_sp_rel_pos = self._null_get_rel_pos
## Ensure correct k_ret
if np.max(self.ks) > self._kret:
self._kret = np.max(self.ks)
# def _array_ele_postformat(self, ele):
# return np.array(ele)
#
# def _null_ele_postformat(self, ele):
# return ele
def _null_postformat(self):
"""Not change anything."""
pass
def _idxs_postformat_array(self):
"""The neighs information postformatting. It format in an array-form
the neighs stored in the instance.
"""
self.idxs = np.array(self.idxs)
def _idxs_postformat_null(self):
"""The neighs information postformatting. It doesnt change the format.
"""
pass
###########################################################################
################################## SETS ###################################
###########################################################################
########################### Setters candidates ############################
###########################################################################
def _general_set(self, neighs_info, iss=None):
"""General set.
Parameters
----------
neighs_info: int, float, slice, np.ndarray, list, tuple or instance
the neighbourhood information given with the proper indicated
structure.
iss: list or np.ndarray (default=None)
the indices of the elements we stored their neighbourhood.
"""
## Set function
self._preset(neighs_info, iss)
## Post-set functions
self._postset()
self.assert_goodness()
def _preset(self, neighs_info, iss=None):
"""Set the class.
Parameters
----------
neighs_info: int, float, slice, np.ndarray, list, tuple or instance
the neighbourhood information given with the proper indicated
structure.
iss: list or np.ndarray (default=None)
the indices of the elements we stored their neighbourhood.
"""
self._reset_stored()
self._set_iss(iss)
self._set_info(neighs_info)
self._postformat()
def _postset(self):
"""Postsetting class."""
if type(self.idxs) == slice:
self.get_neighs = self._get_neighs_slice
elif type(self.idxs) == np.ndarray:
# if len(self.idxs.shape) == 3 and self.ks is None:
# self.ks = list(range(len(self.idxs)))
# else:
# self.staticneighs_set = True
if self.staticneighs:
self.get_neighs = self._get_neighs_array_static
else:
self.get_neighs = self._get_neighs_array_dynamic
elif type(self.idxs) == list:
if self.staticneighs:
self.get_neighs = self._get_neighs_list_static
else:
self.get_neighs = self._get_neighs_list_dynamic
## Format coreget by iss
if type(self.idxs) == slice:
self._staticneighs_get_corestored_by_inds =\
self._staticneighs_get_corestored_by_inds_slice
self._notstaticneighs_get_corestored_by_inds =\
self._notstaticneighs_get_corestored_by_inds_slice
else:
self._staticneighs_get_corestored_by_inds =\
self._staticneighs_get_corestored_by_inds_notslice
self._notstaticneighs_get_corestored_by_inds =\
self._notstaticneighs_get_corestored_by_inds_notslice
def _set_init(self):
"""Reset variables to default."""
## Main information
self.idxs = None
self.sp_relative_pos = None
## Auxiliary information
self.ks = None
self.iss = [0]
## Class structural information
self._setted = False
self._constant_rel_pos = False
self.staticneighs = None
self.staticneighs_set = None
def _reset_stored(self):
"""Reset the stored parameters and neighbourhood information."""
## Main information
self.idxs = None
self.sp_relative_pos = None
self._setted = False
self.ks = None
self.iss = [0]
def _set_general(self, neighs_info):
"""Setting neighs info with heterogenous ways to do it.
Parameters
----------
neighs_info: int, float, slice, np.ndarray, list, tuple or instance
the neighbourhood information given with the proper indicated
structure. The standards of the inputs are:
* neighs [int, float, list, slice or np.ndarray]
* (i, k)
* (neighs, k)
* (neighs_info, k) where neighs_info is a tuple which could
contain (neighs, dists) or (neighs,)
* neighs_info in the form of pst.Neighs_Info
"""
## 0. Format inputs
# If int is a neighs
if type(neighs_info) in [int, float, np.int32, np.int64, np.float]:
self._set_neighs_number(neighs_info)
self.set_sp_rel_pos = self._null_set_rel_pos
self.get_sp_rel_pos = self._null_get_rel_pos
# If slice is a neighs
elif type(neighs_info) == slice:
self._set_neighs_slice(neighs_info)
self.set_sp_rel_pos = self._null_set_rel_pos
self.get_sp_rel_pos = self._null_get_rel_pos
# If array is a neighs
elif type(neighs_info) == np.ndarray:
self._set_neighs_general_array(neighs_info)
self.set_sp_rel_pos = self._null_set_rel_pos
self.get_sp_rel_pos = self._null_get_rel_pos
# If int could be neighs or list of tuples
elif type(neighs_info) == list:
self._set_structure_list(neighs_info)
# If tuple there are more information than neighs
elif type(neighs_info) == tuple:
self._set_structure_tuple(neighs_info)
else:
assert(type(neighs_info).__name__ == 'instance')
## Substitution main information
self.idxs = neighs_info.idxs
self.ks = neighs_info.ks
self.iss = neighs_info.iss
## Copying class information
self._constant_neighs = neighs_info._constant_neighs
self._kret = neighs_info._kret
self._n = neighs_info._n
self.format_set_info = neighs_info.format_set_info
self.format_get_info = neighs_info.format_get_info
self._format_globalpars(neighs_info.staticneighs,
neighs_info.ifdistance, neighs_info.level)
self._format_setters(*neighs_info.format_set_info)
self._format_getters(*neighs_info.format_get_info)
self._format_joining_functions()
############################## Set Structure ##############################
###########################################################################
def _set_raw_structure(self, key):
"""Set the neighbourhood information in a form of raw structure.
Parameters
----------
neighs_info: tuple
the neighborhood information for each element `i` and perturbations
`k`. The standards to set that information are:
* neighs{any form}
"""
self.set_neighs(key)
self.ifdistance = False
def _set_structure_tuple(self, key):
"""Set the neighbourhood information in a form of tuple general.
Parameters
----------
neighs_info: tuple
the neighborhood information for each element `i` and perturbations
`k`. The standards to set that information are:
* (neighs, )
* (neighs_info{any form}, ks)
* (neighs_info{list of tuple only}, ks)
* (neighs{any form}, sp_relative_pos{any form})
* ((neighs{any form}, sp_relative_pos{any form}), ks)
"""
if len(key) == 2:
msg = "Ambiguous input in `set` function of pst.Neighs_Info."
warnings.warn(msg, SyntaxWarning)
if type(key[0]) == tuple:
self.ks = list(np.array([key[1]]).ravel())
self._set_structure_tuple(key[0])
else:
aux_bool = type(key[0]) in [np.ndarray, list]
if type(key[0]) == list and type(key[0][0]) == tuple:
self._set_tuple_list_tuple_structure(key)
elif type(key[0]) == type(key[1]) and aux_bool:
if len(key[0]) == len(key[1]):
self._set_tuple_only_structure(key)
else:
self.ks = list(np.array(key[1]))
self.set_neighs(key[0])
else:
self._set_tuple_only_structure(key)
else:
self.set_neighs(key[0])
def _set_tuple_structure(self, key):
"""Set the neighbourhood information in a form of tuple structure.
Parameters
----------
neighs_info: tuple
the neighborhood information for each element `i` and perturbations
`k`. The standards to set that information are:
* (neighs_info{any form}, ks)
"""
if len(key) == 2:
self.ks = list(np.array(key[1]))
self.set_neighs(key[0])
def _set_tuple_only_structure(self, key):
"""Set the neighbourhood information in a form of tuple only structure.
Parameters
----------
neighs_info: tuple
the neighborhood information for each element `i` and perturbations
`k`. The standards to set that information are:
* (neighs{any form}, sp_relative_pos{any form})
"""
self.set_neighs(key[0])
if len(key) == 2:
self.set_sp_rel_pos(key[1])
elif len(key) > 2:
raise TypeError("Not correct input.")
def _set_tuple_tuple_structure(self, key):
"""Set the neighbourhood information in a form of tuple tuple
structure.
Parameters
----------
neighs_info: tuple
the neighborhood information for each element `i` and perturbations
`k`. The standards to set that information are:
* ((neighs{any form}, sp_relative_pos{any form}), ks)
"""
if len(key) == 2:
ks = [key[1]] if type(key[1]) == int else key[1]
self.ks = list(np.array([ks]).ravel())
self._set_tuple_only_structure(key[0])
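# Minimal sketch (assumed input following the standard above):
#
#     ninfo._set_tuple_tuple_structure((([0, 1], [0.5, 1.5]), 0))
#     # -> ks = [0]; neighs = [0, 1]; relative positions = [0.5, 1.5]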
# def _set_tuple_list_tuple_only(self, key):
# """
# * (neighs_info{list of tuple only}, ks)
# """
# self.ks = list(np.array(key[1]))
# self._set_list_tuple_only_structure(key[0])
def _set_tuple_k_structure(self, key):
"""Set the neighbourhood information in a form of tuple structure.
Parameters
----------
neighs_info: tuple
the neighborhood information for each element `i` and perturbations
`k`. The standards to set that information are:
* (idxs, ks)
"""
self.ks = [key[1]] if type(key[1]) == int else key[1]
self.set_neighs(key[0])
def _set_structure_list(self, key):
"""Set the neighbourhood information in a form of general list
structure.
Parameters
----------
neighs_info: tuple
the neighborhood information for each element `i` and perturbations
`k`. The standards to set that information are:
* [neighs_info{tuple form}]
"""
if len(key) == 0:
self.set_neighs = self._set_neighs_general_list
self.set_neighs(key)
elif type(key[0]) == tuple:
self._set_info = self._set_list_tuple_only_structure
self._set_info(key)
elif type(key[0]) == list:
if self._constant_neighs:
if self.staticneighs:
self.get_neighs = self._get_neighs_array_static
else:
self.get_neighs = self._get_neighs_array_dynamic
else:
if self.staticneighs:
self.get_neighs = self._get_neighs_list_static
else:
self.get_neighs = self._get_neighs_list_dynamic
# Format set neighs
self.set_neighs = self._set_neighs_general_list
self.set_neighs(key)
elif type(key[0]) == np.ndarray:
self.set_neighs = self._general_set_neighs
self.set_neighs(np.array(key))
elif type(key[0]) in [int, float, np.int32, np.int64]:
self.level = 1
self._set_info = self._set_raw_structure
self.ifdistance = False
self.set_sp_rel_pos = self._null_set_rel_pos
if self.staticneighs:
self.get_neighs = self._get_neighs_array_static
else:
self.get_neighs = self._get_neighs_array_dynamic
# Format set neighs
self.set_neighs = self._set_neighs_array_lvl1
self.set_neighs(np.array(key))
def _set_list_tuple_only_structure(self, key):
"""Set the neighbourhood information in a form of list tuple only
structure.
Parameters
----------
neighs_info: tuple
the neighborhood information for each element `i` and perturbations
`k`. The standards to set that information are:
* [(neighs{any form}, sp_relative_pos{any form}), ...] (one per `k`)
"""
## Change to list and whatever it was
self.set_neighs([e[0] for e in key])
self.set_sp_rel_pos([e[1] for e in key])
def _set_tuple_list_tuple_structure(self, key):
"""Set the neighbourhood information in a form of tuple, list tuple
structure.
Parameters
----------
neighs_info: tuple
the neighborhood information for each element `i` and perturbations
`k`. The standards to set that information are:
* (neighs_info{list of tuple only}, ks)
"""
self.ks = [key[1]] if type(key[1]) == int else key[1]
if not self.staticneighs:
assert(len(key[0]) == len(self.ks))
self._set_list_tuple_only_structure(key[0])
############################### Set Neighs ################################
###########################################################################
## After that has to be set:
# - self.idxs
# - self.ks
#
def _general_set_neighs(self, key):
"""General setting of only neighs.
Parameters
----------
neighs: list or np.ndarray
the neighs information for each element `i`. The standards to set
that information are:
* neighs {number form}
* neighs {list form}
* neighs {array form}
"""
if type(key) == list:
self._set_neighs_general_list(key)
elif type(key) == np.ndarray:
self._set_neighs_general_array(key)
elif type(key) in inttypes:
self._set_neighs_number(key)
else:
# print key
raise TypeError("Incorrect neighs input in pst.Neighs_Info")
def _set_neighs_number(self, key):
"""Only one neighbor expressed in a number way.
Parameters
----------
neighs: int
the neighborhood information for each element `i`. The standards to
set that information are:
* index{int form}
"""
if self.staticneighs:
self.idxs = np.array([[key]]*len(self.iss))
else:
if self.ks is None:
self.ks = range(1)
len_ks = len(self.ks)
self.idxs = np.array([[[key]]*len(self.iss)]*len_ks)
self._constant_neighs = True
self._setted = True
def _set_neighs_slice(self, key):
"""Set neighs in a slice-form.
Parameters
----------
neighs: slice
the neighs information for each element `i`. The standards to set
that information are:
* indices{slice form}
"""
## Condition to use slice type
self._constant_neighs = True
self.ks = range(1) if self.ks is None else self.ks
## Possible options
if key is None:
self.idxs = slice(0, self._n, 1)
elif isinstance(key, slice):
start = 0 if key.start is None else key.start
stop = self._n if key.stop is None else key.stop
# Cap effectively-unbounded stops at the known size. The threshold is
# assumed to be 10e16; the original source had `10*16`, a likely typo,
# and compared the raw key.stop, which fails for key.stop=None.
stop = self._n if stop > 10e16 else stop
step = 1 if key.step is None else key.step
self.idxs = slice(start, stop, step)
elif type(key) in inttypes:
self.idxs = slice(0, key, 1)
elif type(key) == tuple:
self.idxs = slice(key[0], key[1], 1)
self._setted = True
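# Sketch of the accepted slice-like inputs (assuming self._n is set):
#
#     ninfo._set_neighs_slice(None)         # -> slice(0, self._n, 1)
#     ninfo._set_neighs_slice(5)            # -> slice(0, 5, 1)
#     ninfo._set_neighs_slice((2, 7))       # -> slice(2, 7, 1)
#     ninfo._set_neighs_slice(slice(1, 9))  # -> slice(1, 9, 1)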
def _set_neighs_array_lvl1(self, key):
"""Set neighs as a array level 1 form.
Parameters
----------
neighs: np.ndarray
the neighs information for each element `i`. The standards to set
that information are:
* indices{np.ndarray form} shape: (neighs)
"""
#sh = key.shape
## If only array of neighs
if self.staticneighs:
self.idxs = np.array([key for i in range(len(self.iss))])
else:
self.ks = range(1) if self.ks is None else self.ks
len_ks = len(self.ks)
self.idxs = np.array([[key for i in range(len(self.iss))]
for i in range(len_ks)])
self._setted = True
def _set_neighs_array_lvl2(self, key):
"""Set neighs as array level 2 form.
Parameters
----------
neighs: np.ndarray
the neighs information for each element `i`. The standards to set
that information are:
* indices{np.ndarray form} shape: (iss, neighs)
"""
sh = key.shape
## If only iss and neighs
self.idxs = key
if self.staticneighs:
self.idxs = np.array(key)
else:
len_ks = len(self.ks) if self.ks is not None else 1
self.ks = range(1) if self.ks is None else self.ks
self.idxs = np.array([key for k in range(len_ks)])
self._setted = True
if sh[0] != len(self.iss):
self.iss = list(range(sh[0]))
def _set_neighs_array_lvl3(self, key):
"""Set neighs as array level 3 form.
Parameters
----------
neighs: np.ndarray
the neighs information for each element `i`. The standards to set
that information are:
* indices{np.ndarray form} shape: (ks, iss, neighs)
"""
self.idxs = np.array(key)
self.ks = range(len(self.idxs)) if self.ks is None else self.ks
if self.staticneighs:
self.idxs = np.array(key[0])
if len(self.idxs) != len(self.iss):
self.iss = list(range(len(self.idxs)))
else:
if len(self.idxs[0]) != len(self.iss):
self.iss = list(range(len(self.idxs[0])))
self._setted = True
def _set_neighs_general_array(self, key):
"""Set neighs as a general array form.
Parameters
----------
neighs: np.ndarray
the neighs information for each element `i`. The standards to set
that information are:
* indices{np.ndarray form} shape: (neighs)
* indices{np.ndarray form} shape: (iss, neighs)
* indices{np.ndarray form} shape: (ks, iss, neighs)
"""
key = np.array([key]) if type(key) in inttypes else key
sh = key.shape
## If only array of neighs
if len(sh) == 0:
self._set_neighs_number(key)
# self._setted = False
# if self.staticneighs:
# self.idxs = np.array([[]])
# else:
# self.idxs = np.array([[[]]])
elif len(sh) == 1:
self._set_neighs_array_lvl1(key)
## If only iss and neighs
elif len(sh) == 2:
self._set_neighs_array_lvl2(key)
elif len(sh) == 3:
self._set_neighs_array_lvl3(key)
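# Shape-based dispatch summary (mirrors the branches above):
#   ()                -> _set_neighs_number     (single neighbour)
#   (neighs,)         -> _set_neighs_array_lvl1
#   (iss, neighs)     -> _set_neighs_array_lvl2
#   (ks, iss, neighs) -> _set_neighs_array_lvl3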
def _set_neighs_general_list(self, key):
"""Set neighs as a general list form.
Parameters
----------
neighs: list
the neighs information for each element `i`. The standards to set
that information are:
* indices {list of list form [neighs]} [neighs]
* [neighs_info{array-like form}, ...] [iss][neighs]
* [neighs_info{array-like form}, ...] [ks][iss][neighs]
"""
### WARNING: DOES NOT WORK WITH EMPTY NEIGHS
if '__len__' not in dir(key):
self._set_neighs_number(key)
else:
if len(key) == 0:
self._set_neighs_list_only(key)
elif '__len__' not in dir(key[0]):
self._set_neighs_list_only(key)
else:
if all([len(key[i]) == 0 for i in range(len(key))]):
self._setted = False
if self.staticneighs:
self.idxs = np.array([[]])
else:
self.idxs = np.array([[[]]])
elif '__len__' not in dir(key[0][0]):
self._set_neighs_list_list(key)
else:
self._set_neighs_list_list_list(key)
def _set_neighs_list_only(self, key):
"""Set the level 1 list
Parameters
----------
neighs: list
the neighs information for each element `i`. The standards to set
that information are:
* indices {list of list form [neighs]} [neighs]
"""
self._set_neighs_array_lvl1(np.array(key))
def _set_neighs_list_list(self, key):
"""Set the level 2 list.
Parameters
----------
neighs: list
the neighs information for each element `i`. The standards to set
that information are:
* [neighs_info{array-like form}, ...] [iss][neighs]
"""
if self._constant_neighs:
key = np.array(key)
if self.staticneighs:
self.idxs = key
self.ks = range(1) if self.ks is None else self.ks
else:
self.ks = range(1) if self.ks is None else self.ks
len_ks = len(self.ks)
self.idxs = [key for k in range(len_ks)]
if type(key) == np.ndarray:
self.idxs = np.array(self.idxs)
if len(self.iss) != len(key):
self.iss = range(len(key))
# if len(self.idxs[0]) > 0:
# self.iss = list(range(len(self.idxs)))
self._setted = True
def _set_neighs_list_list_list(self, key):
"""Set neighs as a level 3 list form.
Parameters
----------
neighs: list
the neighs information for each element `i`. The standards to set
that information are:
* [neighs_info{array-like form}, ...] [ks][iss][neighs]
"""
self.ks = list(range(len(key))) if self.ks is None else self.ks
if self._constant_neighs:
self.idxs = np.array(key)
else:
self.idxs = key
if len(self.idxs[0]) != len(self.iss):
self.iss = list(range(len(self.idxs[0])))
if self.staticneighs:
self.idxs = self.idxs[0]
self._setted = True
########################### Set Sp_relative_pos ###########################
###########################################################################
def _general_set_rel_pos(self, rel_pos):
"""Set the general relative position.
Parameters
----------
rel_pos: int, float, list or np.ndarray
the relative position of the neighbourhood respect the centroid.
The standard inputs form are:
* None
* list of arrays len(iss) -> unique rel_pos for ks
* list of lists of arrays -> complete
"""
if rel_pos is None or self.ifdistance is False:
self._null_set_rel_pos(rel_pos)
self.get_sp_rel_pos = self._null_get_rel_pos
elif type(rel_pos) == list:
self._set_rel_pos_general_list(rel_pos)
elif type(rel_pos) == np.ndarray:
self._set_rel_pos_general_array(rel_pos)
elif type(rel_pos) in [float, int, np.float, np.int32, np.int64]:
self._set_rel_pos_number(rel_pos)
else:
# print rel_pos
msg = "Incorrect relative position input in pst.Neighs_Info"
raise TypeError(msg)
def _set_rel_pos_general_list(self, rel_pos):
"""Set of relative position in a general list form.
Parameters
----------
rel_pos: list
the relative position of the neighbourhood respect the centroid.
The standard inputs form are:
* None
* list of arrays len(iss) -> unique rel_pos for ks
* list of lists of arrays -> complete
"""
if self.level is not None:
if self.level == 0:
self._set_rel_pos_dim(rel_pos)
elif self.level == 1:
self._list_only_set_rel_pos(rel_pos)
elif self.level == 2:
self._list_list_only_set_rel_pos(rel_pos)
elif self.level == 3:
self._list_list_set_rel_pos(rel_pos)
else:
if len(rel_pos) == 0:
self._set_rel_pos_number(rel_pos)
elif type(rel_pos[0]) not in [list, np.ndarray]:
self._list_only_set_rel_pos(rel_pos)
else:
if len(rel_pos[0]) == 0:
self._list_only_set_rel_pos(rel_pos)
elif type(rel_pos[0][0]) not in [list, np.ndarray]:
self._list_only_set_rel_pos(rel_pos)
else:
if len(rel_pos[0][0]) == 0:
self._list_list_only_set_rel_pos(rel_pos)
elif type(rel_pos[0][0][0]) not in [list, np.ndarray]:
self._list_list_only_set_rel_pos(rel_pos)
else:
self._list_list_set_rel_pos(rel_pos)
def _null_set_rel_pos(self, rel_pos):
"""Not consider the input.
Parameters
----------
rel_pos: list or np.ndarray
the relative position of the neighbourhood respect the centroid.
"""
self.get_sp_rel_pos = self._null_get_rel_pos
def _set_rel_pos_number(self, rel_pos):
"""Number set pos.
Parameters
----------
rel_pos: int or float
the relative position of the neighbourhood respect the centroid.
The standard inputs form are:
* int or float
"""
self.sp_relative_pos = self._set_rel_pos_dim([rel_pos])
def _set_rel_pos_dim(self, rel_pos):
"""Set rel pos with zero level.
Parameters
----------
rel_pos: list or np.ndarray
the relative position of the neighbourhood respect the centroid.
The standard inputs form are:
* rel_pos{array or list form} [dim]
"""
if '__len__' not in dir(rel_pos):
rel_pos = np.array([rel_pos])
if self.staticneighs:
rel_pos_f = []
for i in range(len(self.idxs)):
rel_pos_i = [rel_pos for nei in range(len(self.idxs[i]))]
rel_pos_f.append(rel_pos_i)
else:
rel_pos_f = []
for k in range(len(self.idxs)):
rel_pos_k = []
for i in range(len(self.idxs[k])):
n_nei = len(self.idxs[k][i])
rel_pos_k.append([rel_pos for nei in range(n_nei)])
rel_pos_f.append(rel_pos_k)
if self._constant_neighs:
rel_pos_f = np.array(rel_pos_f)
self.sp_relative_pos = rel_pos_f
# self.sp_relative_pos = np.array([[[rel_pos]]])
# self.get_sp_rel_pos = self._constant_get_rel_pos
# self.staticneighs = True
def _set_rel_pos_general_array(self, rel_pos):
"""Array set rel pos.
Parameters
----------
rel_pos: list or np.ndarray
the relative position of the neighbourhood respect the centroid.
The standard inputs form are:
* rel_pos{np.ndarray form} shape: (neighs, dim)
* rel_pos{np.ndarray form} shape: (iss, neighs, dim)
* rel_pos{np.ndarray form} shape: (ks, iss, neighs, dim)
"""
n_shape = len(rel_pos.shape)
if n_shape == 2:
self._array_only_set_rel_pos(rel_pos)
elif n_shape == 3:
self._array_array_set_rel_pos(rel_pos)
elif n_shape == 4:
self._array_array_array_set_rel_pos(rel_pos)
def _array_only_set_rel_pos(self, rel_pos):
"""Set the array form relative position.
Parameters
----------
rel_pos: list or np.ndarray
the relative position of the neighbourhood respect the centroid.
The standard inputs form are:
* Array only. [nei][dim] or [nei]
"""
## Preformatting
rel_pos = np.array(rel_pos)
if len(rel_pos.shape) == 1:
rel_pos = rel_pos.reshape((len(rel_pos), 1))
n_iss = len(self.iss)
sp_relative_pos = np.array([rel_pos for i in range(n_iss)])
## Not staticneighs
if not self.staticneighs:
n_k = len(self.idxs)
sp_relative_pos = np.array([sp_relative_pos for i in range(n_k)])
self.sp_relative_pos = sp_relative_pos
def _array_array_set_rel_pos(self, rel_pos):
"""Set the array-array (level 2) relative position.
Parameters
----------
rel_pos: list or np.ndarray
the relative position of the neighbourhood respect the centroid.
The standard inputs form are:
* Array of arrays. [iss][nei][dim] or [iss][nei].
"""
# self.staticneighs = True
if self.staticneighs:
self.sp_relative_pos = np.array(rel_pos)
else:
len_ks = 1 if self.ks is None else len(self.ks)
self.sp_relative_pos = np.array([rel_pos for k in range(len_ks)])
def _array_array_array_set_rel_pos(self, rel_pos):
"""Set the level 3 array relative position.
Parameters
----------
rel_pos: list or np.ndarray
the relative position of the neighbourhood respect the centroid.
The standard inputs form are:
* Array of arrays. [ks][iss][nei][dim] or [ks][iss][nei].
"""
if self.staticneighs:
self.sp_relative_pos = rel_pos[0]
else:
self.sp_relative_pos = rel_pos
def _list_only_set_rel_pos(self, rel_pos):
"""List only relative pos. Every iss and ks has the same neighs with
the same relative information.
Parameters
----------
rel_pos: list or np.ndarray
the relative position of the neighbourhood respect the centroid.
The standard inputs form are:
* [nei][dim] or [nei]
"""
self._array_only_set_rel_pos(rel_pos)
def _list_list_only_set_rel_pos(self, rel_pos):
"""List list only relative pos. Every ks has the same neighs with the
same relative information.
Parameters
----------
rel_pos: list or np.ndarray
the relative position of the neighbourhood respect the centroid.
The standard inputs form are:
* [iss][nei][dim] or [iss][nei]
"""
if self.staticneighs is not True:
assert(self.ks is not None)
n_ks = len(self.ks)
self.sp_relative_pos = [rel_pos]*n_ks
else:
self.sp_relative_pos = rel_pos
def _list_list_set_rel_pos(self, rel_pos):
"""List list list relative pos.
Parameters
----------
rel_pos: list or np.ndarray
the relative position of the neighbourhood respect the centroid.
The standard inputs form are:
* [ks][iss][nei][dim] or [ks][iss][nei]
"""
if self.staticneighs:
self.sp_relative_pos = rel_pos[0]
else:
self.sp_relative_pos = rel_pos
############################### Setter iss ################################
###########################################################################
def _general_set_iss(self, iss):
"""General set iss input.
Parameters
----------
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
"""
if type(iss) == int:
self._int_set_iss(iss)
elif type(iss) in [list, np.ndarray]:
self._list_set_iss(iss)
else:
if type(self.idxs) in [list, np.ndarray]:
if self.staticneighs:
if len(self.iss) != len(self.idxs):
self.iss = range(len(self.idxs))
else:
if len(self.iss) != len(self.idxs[0]):
self.iss = range(len(self.idxs[0]))
def _int_set_iss(self, iss):
"""Input iss always integer.
Parameters
----------
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
"""
self.iss = [iss]
def _list_set_iss(self, iss):
"""Input iss always array-like.
Parameters
----------
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
"""
self.iss = list(iss)
def _null_set_iss(self, iss):
"""Not consider the input.
Parameters
----------
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
"""
pass
###########################################################################
################################## GETS ###################################
###########################################################################
############################# Getter rel_pos ##############################
###########################################################################
def _general_get_rel_pos(self, k_is=[0]):
"""Get the relative position.
Parameters
----------
ks: int, slice, list or np.ndarray (default=[0])
the perturbations indices associated with the returned information.
Returns
-------
sp_relpos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
"""
if self.sp_relative_pos is None:
return self._null_get_rel_pos(k_is)
elif self.staticneighs:
return self._static_get_rel_pos(k_is)
# elif self._constant_rel_pos:
# return self._constant_get_rel_pos(k_is)
else:
if type(self.sp_relative_pos) == list:
return self._dynamic_rel_pos_list(k_is)
else:
return self._dynamic_rel_pos_array(k_is)
def _null_get_rel_pos(self, k_is=[0]):
"""Get the relative position.
Parameters
----------
ks: int, slice, list or np.ndarray (default=[0])
the perturbations indices associated with the returned information.
Returns
-------
sp_relpos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
"""
return [[None]*len(self.iss)]*len(k_is)
# def _constant_get_rel_pos(self, k_is=[0]):
# neighs = self.get_neighs(k_is)
# rel_pos = []
# for k in range(len(neighs)):
# rel_pos_k = []
# for i in range(len(neighs[k])):
# rel_pos_k.append(len(neighs[k][i])*[self.sp_relative_pos])
# rel_pos.append(rel_pos_k)
# if self._constant_neighs:
# rel_pos = np.array(rel_pos)
# return rel_pos
def _static_get_rel_pos(self, k_is=[0]):
"""Get the relative position.
Parameters
----------
ks: int, slice, list or np.ndarray (default=[0])
the perturbations indices associated with the returned information.
Returns
-------
sp_relpos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
"""
return [self.sp_relative_pos for k in k_is]
# def _static_rel_pos_list(self, k_is=[0]):
# return self.sp_relative_pos*len(k_is)
#
# def _static_rel_pos_array(self, k_is=[0]):
# return np.array([self.sp_relative_pos for i in range(len(k_is))])
def _dynamic_rel_pos_list(self, k_is=[0]):
"""Get the relative position.
Parameters
----------
ks: int, slice, list or np.ndarray (default=[0])
the perturbations indices associated with the returned information.
Returns
-------
sp_relpos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
"""
# [[e[k_i] for e in self.sp_relative_pos] for k_i in k_is]
return [self.sp_relative_pos[i] for i in k_is]
def _dynamic_rel_pos_array(self, k_is=[0]):
"""Get the relative position.
Parameters
----------
ks: int, slice, list or np.ndarray (default=[0])
the perturbations indices associated with the returned information.
Returns
-------
sp_relpos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
"""
# [[e[k_i] for e in self.sp_relative_pos] for k_i in k_is]
return [self.sp_relative_pos[i] for i in k_is]
################################ Getters k ################################
###########################################################################
def _general_get_k(self, k=None):
"""General get k.
Parameters
----------
ks: int, slice, list or np.ndarray
the perturbations indices associated unformatted.
Returns
-------
ks: int, slice, list or np.ndarray
the perturbations indices associated formatted.
"""
## Format k
if k is None:
ks = self._default_get_k()
elif type(k) in [np.ndarray, list]:
ks = self._list_get_k(k)
elif type(k) in inttypes:
ks = self._integer_get_k(k)
return ks
def _default_get_k(self, k=None):
"""Default get ks.
Parameters
----------
ks: int, slice, list or np.ndarray
the perturbations indices associated unformatted.
Returns
-------
ks: int, slice, list or np.ndarray
the perturbations indices associated formatted.
"""
if self.ks is None:
return [0]
else:
return self.ks
def _integer_get_k(self, k):
"""Integer get k.
Parameters
----------
ks: int, slice, list or np.ndarray
the perturbations indices associated unformatted.
Returns
-------
ks: int, slice, list or np.ndarray
the perturbations indices associated formatted.
"""
if type(k) == list:
return [self._integer_get_k(e)[0] for e in k]
if k >= 0 and k <= self._kret:
ks = [k]
else:
raise TypeError("k index out of bounds.")
return ks
def _list_get_k(self, k):
"""List get k.
Parameters
----------
ks: int, slice, list or np.ndarray
the perturbations indices associated unformatted.
Returns
-------
ks: int, slice, list or np.ndarray
the perturbations indices associated formatted.
"""
ks = [self._integer_get_k(k_i)[0] for k_i in k]
return ks
def _get_k_indices(self, ks):
"""List of indices of ks.
Parameters
----------
ks: int, slice, list or np.ndarray
the perturbations indices associated with the returned information.
Returns
-------
idx_ks: list
the associated indices to the perturbation indices. Get the index
order.
"""
if self.staticneighs:
idx_ks = ks
else:
idx_ks = [self.ks.index(e) for e in ks]
return idx_ks
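# Example (assumed state): with self.ks == [10, 11, 12] and staticneighs
# False, _get_k_indices([12, 10]) returns [2, 0], the positions of the
# requested ks inside the stored ks; with staticneighs True the input ks
# are returned unchanged.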
############################ Getters information ##########################
###########################################################################
def _general_get_information(self, k=None):
"""Get information stored in this class.
Parameters
----------
ks: int, slice, list or np.ndarray (default=None)
the perturbations indices associated with the returned information.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k` required in the input.
sp_relpos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
ks: int, slice, list or np.ndarray (default=None)
the perturbations indices associated with the returned information.
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
"""
## Format k
ks = self.get_k(k)
idx_ks = self._get_k_indices(ks)
## Get iss
iss = self.iss
## Format idxs
assert(type(idx_ks) == list)
neighs = self.get_neighs(idx_ks)
sp_relative_pos = self.get_sp_rel_pos(idx_ks)
self.check_output_standards(neighs, sp_relative_pos, ks, iss)
# print '3'*50, neighs, sp_relative_pos, ks, iss
return neighs, sp_relative_pos, ks, iss
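# Usage sketch (hypothetical, already-set instance `ninfo`):
#
#     neighs, rel_pos, ks, iss = ninfo._general_get_information(k=[0])
#     # neighs[k][i] -> neighbours of element iss[i] under perturbation
#     # ks[k]; rel_pos follows the same (k, i) layout when distances are
#     # activated.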
def _default_get_information(self, k=None):
"""For the unset instances.
Parameters
----------
ks: int, slice, list or np.ndarray (default=None)
the perturbations indices associated with the returned information.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k` required in the input.
sp_relpos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
ks: int, slice, list or np.ndarray (default=None)
the perturbations indices associated with the returned information.
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
"""
raise Exception("Information not set in pst.Neighs_Info.")
################################ Get neighs ###############################
def _get_neighs_general(self, k_is=[0]):
"""General getting neighs.
Parameters
----------
ks: int, slice, list or np.ndarray (default=[0])
the perturbations indices associated with the returned information.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k` required in the input.
"""
if type(self.idxs) == slice:
neighs = self._get_neighs_slice(k_is)
elif type(self.idxs) == np.ndarray:
if self.staticneighs:
neighs = self._get_neighs_array_static(k_is)
else:
neighs = self._get_neighs_array_dynamic(k_is)
elif type(self.idxs) == list:
if self.staticneighs:
neighs = self._get_neighs_list_static(k_is)
else:
neighs = self._get_neighs_list_dynamic(k_is)
# else:
# self._default_get_neighs()
return neighs
def _get_neighs_slice(self, k_is=[0]):
"""Getting neighs from slice.
Parameters
----------
ks: slice (default=[0])
the perturbations indices associated with the returned information.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k` required in the input.
"""
neighs = [np.array([range(self.idxs.start, self.idxs.stop,
self.idxs.step)
for j in range(len(self.iss))])
for i in range(len(k_is))]
neighs = np.array(neighs)
return neighs
def _get_neighs_array_dynamic(self, k_is=[0]):
"""Getting neighs from array.
Parameters
----------
ks: np.ndarray (default=[0])
the perturbations indices associated with the returned information.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k` required in the input.
"""
neighs = self.idxs[k_is, :, :]
return neighs
def _get_neighs_array_static(self, k_is=[0]):
"""Getting neighs from array.
Parameters
----------
ks: np.ndarray (default=[0])
the perturbations indices associated with the returned information.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k` required in the input.
"""
neighs = [self.idxs for i in range(len(k_is))]
neighs = np.array(neighs)
return neighs
def _get_neighs_list_dynamic(self, k_is=[0]):
"""Getting neighs from list.
Parameters
----------
ks: list (default=[0])
the perturbations indices associated with the returned information.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k` required in the input.
"""
neighs = [self.idxs[k_i] for k_i in k_is]
return neighs
def _get_neighs_list_static(self, k_is=[0]):
"""Getting neighs from list.
Parameters
----------
ks: list or np.ndarray (default=[0])
the perturbations indices associated with the returned information.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k` required in the input.
"""
neighs = [self.idxs for k_i in k_is]
return neighs
def _default_get_neighs(self, k_i=0):
"""Default get neighs (when it is not set)
Parameters
----------
ks: int, list or np.ndarray (default=0)
the perturbations indices associated with the returned information.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k` required in the input.
"""
raise Exception("Information not set in pst.Neighs_Info.")
########################## Get by coreinfo by iss #########################
## Get the neighs_info copy object with same information but iss reduced.
## Format into get_copy_iss and get_copy_iss_by_ind
def _staticneighs_get_copy_iss(self, iss):
"""Get the neighs_info copy object with same information but iss
reduced.
Parameters
----------
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
Returns
-------
neighs_info: pst.Neighs_Info
the neighbourhood information of the elements `i` for the
perturbations `k`.
"""
inds = self._get_indices_from_iss(iss)
return self._staticneighs_get_copy_iss_by_ind(inds)
def _notstaticneighs_get_copy_iss(self, iss):
"""Get the neighs_info copy object with same information but iss
reduced.
Parameters
----------
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
Returns
-------
neighs_info: pst.Neighs_Info
the neighbourhood information of the elements `i` for the
perturbations `k`.
"""
inds = self._get_indices_from_iss(iss)
return self._notstaticneighs_get_copy_iss_by_ind(inds)
def _staticneighs_get_copy_iss_by_ind(self, indices):
"""Get the neighs_info copy object with same information but iss
reduced.
Parameters
----------
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
Returns
-------
neighs_info: pst.Neighs_Info
the neighbourhood information of the elements `i` for the
perturbations `k`.
"""
indices = [indices] if type(indices) == int else indices
iss = [self.iss[i] for i in indices]
idxs, sp_relpos = self._staticneighs_get_corestored_by_inds(indices)
## Copy of information in new container
neighs_info = self.copy()
neighs_info.idxs = idxs
neighs_info.sp_relative_pos = sp_relpos
neighs_info.iss = iss
return neighs_info
def _notstaticneighs_get_copy_iss_by_ind(self, indices):
"""Get the neighs_info copy object with same information but iss
reduced.
Parameters
----------
inds: list
the indices of the elements codes we stored their neighbourhood.
Returns
-------
neighs_info: pst.Neighs_Info
the neighbourhood information of the elements `i` for the
perturbations `k`.
"""
indices = [indices] if type(indices) == int else indices
iss = [self.iss[i] for i in indices]
idxs, sp_relpos = self._notstaticneighs_get_corestored_by_inds(indices)
## Copy of information in new container
neighs_info = self.copy()
neighs_info.idxs = idxs
neighs_info.sp_relative_pos = sp_relpos
neighs_info.iss = iss
return neighs_info
## Auxiliary functions
def _staticneighs_get_corestored_by_inds_notslice(self, inds):
"""Get the neighborhood information from the indices.
Parameters
----------
inds: list
the indices of the elements codes we stored their neighbourhood.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` and for each
perturbation `k`.
sp_relpos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
"""
inds = [inds] if type(inds) == int else inds
idxs = [self.idxs[i] for i in inds]
idxs = np.array(idxs) if type(self.idxs) == np.ndarray else idxs
if self.sp_relative_pos is not None:
sp_relative_pos = [self.sp_relative_pos[i] for i in inds]
else:
sp_relative_pos = None
return idxs, sp_relative_pos
def _notstaticneighs_get_corestored_by_inds_notslice(self, inds):
"""Get the neighborhood information from the indices.
Parameters
----------
inds: list
the indices of the elements codes we stored their neighbourhood.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` and for each
perturbation `k`.
sp_relpos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
"""
inds = [inds] if type(inds) == int else inds
idxs = []
for k in range(len(self.idxs)):
idxs.append([self.idxs[k][i] for i in inds])
idxs = np.array(idxs) if type(self.idxs) == np.ndarray else idxs
if self.sp_relative_pos is not None:
sp_relative_pos = []
for k in range(len(self.sp_relative_pos)):
sp_relative_pos += [[self.sp_relative_pos[k][i] for i in inds]]
else:
sp_relative_pos = None
return idxs, sp_relative_pos
def _staticneighs_get_corestored_by_inds_slice(self, inds):
"""Get the neighborhood information from the indices.
Parameters
----------
inds: list
the indices of the elements codes we stored their neighbourhood.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` and for each
perturbation `k`.
sp_relpos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
"""
inds = [inds] if type(inds) == int else inds
idxs = self.idxs
if self.sp_relative_pos is not None:
sp_relative_pos = [self.sp_relative_pos[i] for i in inds]
else:
sp_relative_pos = None
return idxs, sp_relative_pos
def _notstaticneighs_get_corestored_by_inds_slice(self, inds):
"""Get the neighborhood information from the indices.
Parameters
----------
inds: list
the indices of the elements codes we stored their neighbourhood.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` and for each
perturbation `k`.
sp_relpos: list or np.ndarray (default=None)
the relative position information for each element `i` and for each
perturbation `k`.
"""
inds = [inds] if type(inds) == int else inds
idxs = self.idxs
if self.sp_relative_pos is not None:
sp_relative_pos = []
for k in range(len(self.sp_relative_pos)):
sp_relative_pos += [[self.sp_relative_pos[k][i] for i in inds]]
else:
sp_relative_pos = None
return idxs, sp_relative_pos
def _get_indices_from_iss(self, iss):
"""Indices of iss from self.iss.
Parameters
----------
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
Returns
-------
inds: list
the indices of the elements codes we stored their neighbourhood.
"""
iss = [iss] if type(iss) not in [np.ndarray, list] else iss
if self.iss is not None:
inds = []
for i in iss:
inds.append(list(self.iss).index(i))
else:
# Fallback restored from the commented-out lines above: without
# stored iss, assume the given iss are already positional indices
# (otherwise `inds` would be unbound).
inds = iss
return inds
###########################################################################
################################ CHECKERS #################################
###########################################################################
### Only activate that in a testing process
def assert_goodness(self):
"""Assert standarts of storing."""
if self._setted:
self.assert_stored_iss()
self.assert_stored_ks()
## Check idxs
self.assert_stored_idxs()
## Check sp_relative_pos
self.assert_stored_sp_rel_pos()
def assert_stored_sp_rel_pos(self):
"""Definition of the standart store for sp_relative_pos."""
# ## Temporal
# if self.sp_relative_pos is not None:
# if self._constant_neighs:
# if self.staticneighs:
# assert(len(np.array(self.sp_relative_pos).shape) == 3)
# else:
# assert(len(np.array(self.sp_relative_pos).shape) == 4)
# #################
array_types = [list, np.ndarray]
if self.sp_relative_pos is not None:
assert(type(self.sp_relative_pos) in [list, np.ndarray])
# if type(self.sp_relative_pos) in [float, int, np.int32, np.int64]:
# ### Probably redundant
# # it is needed or possible this situation?
# pass
# if self.ks is None:
# assert(self.staticneighs)
# assert(len(self.sp_relative_pos) == len(self.iss))
if self.staticneighs:
assert(len(self.sp_relative_pos) == len(self.iss))
## Assert deep 3
if len(self.iss):
assert(type(self.sp_relative_pos[0]) in array_types)
else:
assert(self.ks is not None)
assert(len(self.sp_relative_pos) == len(self.ks))
if type(self.sp_relative_pos[0]) in array_types:
if not self.staticneighs:
assert(len(self.sp_relative_pos[0]) == len(self.iss))
if len(self.sp_relative_pos[0]) > 0:
assert(type(self.sp_relative_pos[0][0]) in array_types)
def assert_stored_iss(self):
"""Definition of the standart store for iss."""
assert(type(self.iss) == list)
assert(len(self.iss) > 0)
def assert_stored_ks(self):
"""Definition of the standart store for ks."""
assert(self.ks is None or type(self.ks) in [list, np.ndarray])
if self.ks is not None:
assert(type(self.ks[0]) in inttypes)
def assert_stored_idxs(self):
"""Definition of the standart store for sp_relative_pos."""
if type(self.idxs) == list:
assert(type(self.idxs[0]) in [list, np.ndarray])
if not self.staticneighs:
assert(type(self.idxs[0][0]) in [list, np.ndarray])
else:
if '__len__' in dir(self.idxs[0]):
if len(self.idxs[0]):
assert(type(self.idxs[0][0]) in inttypes)
else:
assert(not any(self.idxs[0]))
elif type(self.idxs) == np.ndarray:
if self.staticneighs:
assert(len(self.idxs.shape) == 2)
else:
assert(len(self.idxs.shape) == 3)
# if self.ks is not None and not self.staticneighs:
# assert(len(self.idxs) == len(self.ks))
# else:
# assert(len(self.idxs.shape) == 2)
if self.staticneighs:
assert(len(self.idxs) == len(self.iss))
else:
assert(len(self.idxs[0]) == len(self.iss))
elif type(self.idxs) == slice:
pass
else:
### Probably redundant (Only testing purposes)
# print type(self.idxs), self.idxs
types = str(type(self.idxs))
raise Exception("Not proper type in self.idxs. Type: %s." % types)
def check_output_standards(self, neighs, sp_relative_pos, ks, iss):
"""Check output standarts.
Parameters
----------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k`.
sp_relpos: list or np.ndarray
the relative position information for each element `i` for each
perturbation `k`.
ks: list or np.ndarray
the perturbations indices associated with the returned information.
iss: list or np.ndarray
the indices of the elements we stored their neighbourhood.
"""
self.check_output_neighs(neighs, ks)
self.check_output_rel_pos(sp_relative_pos, ks)
assert(len(iss) == len(self.iss))
def check_output_neighs(self, neighs, ks):
"""Check standart outputs of neighs.
Parameters
----------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k`.
ks: list or np.ndarray
the perturbations indices associated with the returned information.
"""
if type(neighs) == list:
assert(len(neighs) == len(ks))
#assert(type(neighs[0]) == list)
assert(len(neighs[0]) == len(self.iss))
elif type(neighs) == np.ndarray:
assert(len(neighs.shape) == 3)
assert(len(neighs) == len(ks))
assert(neighs.shape[1] == len(self.iss))
else:
### Probably redundant (Only testing purposes)
# print neighs
types = str(type(neighs))
raise Exception("Not correct neighs output.Type: %s." % types)
def check_output_rel_pos(self, sp_relative_pos, ks):
"""Check standart outputs of rel_pos.
Parameters
----------
sp_relpos: list or np.ndarray
the relative position information for each element `i` for each
perturbation `k`.
ks: list or np.ndarray
the perturbations indices associated with the returned information.
"""
assert(type(sp_relative_pos) in [np.ndarray, list])
assert(len(sp_relative_pos) == len(ks))
assert(len(sp_relative_pos[0]) == len(self.iss))
############################ Joining functions ############################
def _format_joining_functions(self):
"""Format the joining functions to use."""
## TODO: Extend to n possible neighs_info elements
if self.staticneighs:
if self.ifdistance:
self.join_neighs_and = join_neighsinfo_AND_static_dist
self.join_neighs_or = join_neighsinfo_OR_static_dist
self.join_neighs_xor = join_neighsinfo_XOR_static_dist
else:
self.join_neighs_and = join_neighsinfo_AND_static_notdist
self.join_neighs_or = join_neighsinfo_OR_static_notdist
self.join_neighs_xor = join_neighsinfo_XOR_static_notdist
else:
if self.ifdistance:
self.join_neighs_and = join_neighsinfo_AND_notstatic_dist
self.join_neighs_or = join_neighsinfo_OR_notstatic_dist
self.join_neighs_xor = join_neighsinfo_XOR_notstatic_dist
else:
self.join_neighs_and = join_neighsinfo_AND_notstatic_notdist
self.join_neighs_or = join_neighsinfo_OR_notstatic_notdist
self.join_neighs_xor = join_neighsinfo_XOR_notstatic_notdist
def join_neighs(self, neighs_info, mode='and', joiner_pos=None):
"""General joining function.
Parameters
----------
neighs_info: pst.Neighs_Info
the neighbourhood information of the other neighs we want to join.
mode: str, optional (default='and')
the type of joining process we want to do: 'and', 'or' or 'xor'.
joiner_pos: function (default=None)
the function to join the relative positions of the different
neighbourhood.
Returns
-------
new_neighs_info: pst.Neighs_Info
the neighbourhood information of joined neighbourhood.
"""
assert(mode in ['and', 'or', 'xor'])
if mode == 'and':
if self.ifdistance:
new_neighs_info = self.join_neighs_and(self, neighs_info,
joiner_pos)
else:
new_neighs_info = self.join_neighs_and(self, neighs_info)
elif mode == 'or':
if self.ifdistance:
new_neighs_info = self.join_neighs_or(self, neighs_info,
joiner_pos)
else:
new_neighs_info = self.join_neighs_or(self, neighs_info)
elif mode == 'xor':
if self.ifdistance:
new_neighs_info = self.join_neighs_xor(self, neighs_info,
joiner_pos)
else:
new_neighs_info = self.join_neighs_xor(self, neighs_info)
return new_neighs_info
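# Illustrative usage (a sketch, not part of the original module): given two
# compatible Neighs_Info instances, a union of their neighbourhoods is
# obtained with:
# joined = neighs_info_a.join_neighs(neighs_info_b, mode='or')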
###############################################################################
######################### Auxiliar inspect functions ##########################
###############################################################################
def ensuring_neighs_info(neighs_info, k):
"""Ensuring that the neighs_info is in Neighs_Info object container.
Parameters
----------
neighs_info: pst.Neighs_Info or tuple
the neighbourhood information.
k: list
the list of perturbation indices.
Returns
-------
neighs_info: pst.Neighs_Info
the properly formatted neighbourhood information.
"""
if not type(neighs_info).__name__ == 'instance':
parameters = inspect_raw_neighs(neighs_info, k=k)
parameters['format_structure'] = 'tuple_k'
neighs_info_object = Neighs_Info(**parameters)
neighs_info_object.set((neighs_info, k))
neighs_info = neighs_info_object
return neighs_info
def inspect_raw_neighs(neighs_info, k=0):
"""Useful class to inspect a raw structure neighs, in order to set
some parts of the class in order to a proper settting adaptation.
Parameters
----------
neighs_info: pst.Neighs_Info or tuple
the neighbourhood information.
k: int or list (default=0)
the list of perturbation indices.
Returns
-------
parameters: dict
the parameters to reinstantiate the neighbourhood information
properly.
"""
deep = find_deep(neighs_info)
k = [k] if type(k) == int else k
parameters = {'format_structure': 'raw'}
parameters['format_level'] = deep
if deep == 3:
assert(np.max(k) <= len(neighs_info))
parameters['kret'] = len(neighs_info)
parameters['staticneighs'] = False
else:
parameters['staticneighs'] = True
parameters['kret'] = np.max(k)
return parameters
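# Illustrative sketch (not part of the original module): a depth-2 raw
# structure yields static-neighbourhood parameters.
# inspect_raw_neighs([[0, 1], [2]], k=0)
# -> {'format_structure': 'raw', 'format_level': 2,
#     'staticneighs': True, 'kret': 0}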
def find_deep(neighs_info):
"""Find deep from a raw structure.
Parameters
----------
neighs_info: tuple
the neighbourhood information.
Returns
-------
deep: int
the level in which the information is provided.
"""
if '__len__' not in dir(neighs_info):
deep = 0
else:
if len(neighs_info) == 0:
deep = 1
elif '__len__' not in dir(neighs_info[0]):
deep = 1
else:
logi = [len(neighs_info[i]) == 0 for i in range(len(neighs_info))]
if all(logi):
deep = 2
elif '__len__' not in dir(neighs_info[0][0]):
deep = 2
else:
deep = 3
return deep
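# Illustrative sketch (not part of the original module) of the depth levels
# `find_deep` distinguishes:
# find_deep(0)               -> 0  (scalar, no __len__)
# find_deep([0, 1])          -> 1  (flat sequence of neighs)
# find_deep([[0, 1], [2]])   -> 2  (neighs per element of iss)
# find_deep([[[0, 1], [2]]]) -> 3  (neighs per iss element per perturbation k)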
def neighsinfo_features_preformatting_tuple(key, k_perturb):
"""Preformatting tuple.
Parameters
----------
key: tuple
the neighborhood information. Assumed tuple input:
* (idxs, ks)
k_perturb: int
the number of perturbations.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k`.
ks: list or np.ndarray
the perturbations indices associated with the returned information.
sp_relpos: list or np.ndarray
the relative position information for each element `i` for each
perturbation `k`.
"""
deep = find_deep(key[0])
if deep == 1:
ks = [key[1]] if type(key[1]) == int else key[1]
i, k, d = neighsinfo_features_preformatting_list(key[0], ks)
else:
neighs_info = Neighs_Info()
neighs_info.set_information(k_perturb)
neighs_info.set(key)
# Get information
i, d, k, _ = neighs_info.get_information()
return i, k, d
def neighsinfo_features_preformatting_list(key, k_perturb):
"""Preformatting list.
Parameters
----------
key: list
the neighborhood information: a flat list of element indices.
k_perturb: int
the number of perturbations.
Returns
-------
neighs: list or np.ndarray
the neighs information for each element `i` for each possible
perturbation `k`.
ks: list or np.ndarray
the perturbations indices associated with the returned information.
sp_relpos: list or np.ndarray
the relative position information for each element `i` for each
perturbation `k`.
"""
kn = range(k_perturb+1) if type(k_perturb) == int else k_perturb
key = [[idx] for idx in key]
i, k, d = np.array([key]*len(kn)), kn, [[None]*len(key)]*len(kn)
return i, k, d
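# Illustrative sketch (not part of the original module): for key=[3, 7] and
# k_perturb=1 (perturbations 0 and 1) the outputs are
# i -> np.ndarray of shape (2, 2, 1), k -> [0, 1],
# d -> [[None, None], [None, None]]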
###############################################################################
####################### Complementary Joining function #######################
###############################################################################
def join_by_iss(list_neighs_info):
"""Joinning by iss.
Parameters
----------
list_neighs_info: list of pst.Neighs_Info
the list of different neighbourhood information, with overlapping
set of iss.
Returns
-------
neighs_info: tuple
the joined neighbourhood information.
"""
## Computation
if len(list_neighs_info) == 1:
return list_neighs_info[0]
static = list_neighs_info[0].staticneighs
ifdistance = list_neighs_info[0].sp_relative_pos is not None
assert(all([(nei.sp_relative_pos is not None) == ifdistance for nei in list_neighs_info]))
assert(all([nei.staticneighs == static for nei in list_neighs_info]))
ks = list_neighs_info[0].ks
# print ks
# print [nei.ks for nei in list_neighs_info]
assert(all([len(nei.ks) == len(ks) for nei in list_neighs_info]))
assert(all([nei.ks == ks for nei in list_neighs_info]))
if static:
sp_relative_pos = None if not ifdistance else []
iss, idxs = [], []
for nei in list_neighs_info:
if type(nei.idxs) != slice:
idxs += list(nei.idxs)
else:
idxs.append(nei.idxs)
iss += nei.iss
if ifdistance:
sp_relative_pos += list(nei.sp_relative_pos)
else:
sp_relative_pos = None if not ifdistance else []
iss = list(np.hstack([nei.iss for nei in list_neighs_info]))
idxs = []
for k in range(len(ks)):
idxs_k = []
sp_relative_pos_k = None if not ifdistance else []
for nei in list_neighs_info:
idxs_k += list(nei.idxs[k])
if ifdistance:
sp_relative_pos_k += list(nei.sp_relative_pos[k])
idxs.append(idxs_k)
if ifdistance:
sp_relative_pos.append(sp_relative_pos_k)
constant = list_neighs_info[0]._constant_neighs
assert(all([nei._constant_neighs == constant for nei in list_neighs_info]))
if constant:
idxs = np.array(idxs)
## Formatting
level = 2 if static else 3
_, type_neighs, type_sp_rel_pos, _ = list_neighs_info[0].format_set_info
format_get_info, format_get_k_info = list_neighs_info[0].format_get_info
type_neighs = 'array' if constant else 'list'
nei = Neighs_Info(constant_neighs=constant, format_structure='tuple_only',
format_get_info=None, format_get_k_info=None,
format_set_iss='list', staticneighs=static,
ifdistance=ifdistance, type_neighs=type_neighs,
format_level=level)
neighs_nfo = (idxs, sp_relative_pos) if ifdistance else (idxs,)
nei.set(neighs_nfo, iss)
nei.set_ks(ks)
return nei<|fim▁end|> | `k`. The standards to set that information are:
* [(neighs{any form}, sp_relative_pos{any form})] |
<|file_name|>list_box.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
#[cfg(feature = "v3_10")]
use Adjustment;
use Container;
#[cfg(feature = "v3_10")]
use ListBoxRow;
use MovementStep;
use SelectionMode;
use Widget;
use ffi;
use glib;
use glib::Value;
use glib::object::Downcast;
use glib::object::IsA;
use glib::signal::connect;
use glib::translate::*;
use glib_ffi;
use gobject_ffi;
use libc;
use std::boxed::Box as Box_;
use std::mem::transmute;
glib_wrapper! {
pub struct ListBox(Object<ffi::GtkListBox>): Container, Widget;
match fn {
get_type => || ffi::gtk_list_box_get_type(),
}
}
impl ListBox {
#[cfg(feature = "v3_10")]
pub fn new() -> ListBox {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_list_box_new()).downcast_unchecked()
}
}
}
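// Illustrative usage (a sketch, not part of the generated bindings; it
// assumes GTK has already been initialized by the application):
// let list_box = ListBox::new();
// list_box.set_selection_mode(SelectionMode::Single);
// list_box.set_activate_on_single_click(true);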
pub trait ListBoxExt {
//#[cfg(feature = "v3_16")]
//fn bind_model<'a, 'b, P: IsA</*Ignored*/gio::ListModel> + 'a, Q: Into<Option<&'a P>>, R: Into<Option<&'b /*Unimplemented*/ListBoxCreateWidgetFunc>>, S: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, model: Q, create_widget_func: R, user_data: S, user_data_free_func: /*Unknown conversion*//*Unimplemented*/DestroyNotify);
#[cfg(feature = "v3_10")]
fn drag_highlight_row(&self, row: &ListBoxRow);
#[cfg(feature = "v3_10")]
fn drag_unhighlight_row(&self);
#[cfg(feature = "v3_10")]
fn get_activate_on_single_click(&self) -> bool;
#[cfg(feature = "v3_10")]
fn get_adjustment(&self) -> Option<Adjustment>;
#[cfg(feature = "v3_10")]
fn get_row_at_index(&self, index_: i32) -> Option<ListBoxRow>;
#[cfg(feature = "v3_10")]
fn get_row_at_y(&self, y: i32) -> Option<ListBoxRow>;
#[cfg(feature = "v3_10")]
fn get_selected_row(&self) -> Option<ListBoxRow>;
#[cfg(feature = "v3_14")]
fn get_selected_rows(&self) -> Vec<ListBoxRow>;
#[cfg(feature = "v3_10")]
fn get_selection_mode(&self) -> SelectionMode;
#[cfg(feature = "v3_10")]
fn insert<P: IsA<Widget>>(&self, child: &P, position: i32);
#[cfg(feature = "v3_10")]
fn invalidate_filter(&self);
#[cfg(feature = "v3_10")]
fn invalidate_headers(&self);
#[cfg(feature = "v3_10")]
fn invalidate_sort(&self);
#[cfg(feature = "v3_10")]
fn prepend<P: IsA<Widget>>(&self, child: &P);
#[cfg(feature = "v3_14")]
fn select_all(&self);
#[cfg(feature = "v3_10")]
fn select_row<'a, P: Into<Option<&'a ListBoxRow>>>(&self, row: P);
//#[cfg(feature = "v3_14")]
//fn selected_foreach<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, func: /*Unknown conversion*//*Unimplemented*/ListBoxForeachFunc, data: P);
#[cfg(feature = "v3_10")]
fn set_activate_on_single_click(&self, single: bool);
#[cfg(feature = "v3_10")]
fn set_adjustment<'a, P: Into<Option<&'a Adjustment>>>(&self, adjustment: P);
//#[cfg(feature = "v3_10")]
//fn set_filter_func<'a, P: Into<Option<&'a /*Unimplemented*/ListBoxFilterFunc>>, Q: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, filter_func: P, user_data: Q, destroy: /*Unknown conversion*//*Unimplemented*/DestroyNotify);
//#[cfg(feature = "v3_10")]
//fn set_header_func<'a, P: Into<Option<&'a /*Unimplemented*/ListBoxUpdateHeaderFunc>>, Q: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, update_header: P, user_data: Q, destroy: /*Unknown conversion*//*Unimplemented*/DestroyNotify);
#[cfg(feature = "v3_10")]
fn set_placeholder<'a, P: IsA<Widget> + 'a, Q: Into<Option<&'a P>>>(&self, placeholder: Q);
#[cfg(feature = "v3_10")]
fn set_selection_mode(&self, mode: SelectionMode);
//#[cfg(feature = "v3_10")]
//fn set_sort_func<'a, P: Into<Option<&'a /*Unimplemented*/ListBoxSortFunc>>, Q: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, sort_func: P, user_data: Q, destroy: /*Unknown conversion*//*Unimplemented*/DestroyNotify);
#[cfg(feature = "v3_14")]
fn unselect_all(&self);
#[cfg(feature = "v3_14")]
fn unselect_row(&self, row: &ListBoxRow);
fn get_property_activate_on_single_click(&self) -> bool;
fn set_property_activate_on_single_click(&self, activate_on_single_click: bool);
fn get_property_selection_mode(&self) -> SelectionMode;
fn set_property_selection_mode(&self, selection_mode: SelectionMode);
fn connect_activate_cursor_row<F: Fn(&Self) + 'static>(&self, f: F) -> u64;
fn connect_move_cursor<F: Fn(&Self, MovementStep, i32) + 'static>(&self, f: F) -> u64;
#[cfg(feature = "v3_10")]
fn connect_row_activated<F: Fn(&Self, &ListBoxRow) + 'static>(&self, f: F) -> u64;
#[cfg(feature = "v3_10")]
fn connect_row_selected<F: Fn(&Self, &Option<ListBoxRow>) + 'static>(&self, f: F) -> u64;
#[cfg(feature = "v3_14")]
fn connect_select_all<F: Fn(&Self) + 'static>(&self, f: F) -> u64;
#[cfg(feature = "v3_14")]
fn connect_selected_rows_changed<F: Fn(&Self) + 'static>(&self, f: F) -> u64;
fn connect_toggle_cursor_row<F: Fn(&Self) + 'static>(&self, f: F) -> u64;
#[cfg(feature = "v3_14")]
fn connect_unselect_all<F: Fn(&Self) + 'static>(&self, f: F) -> u64;
}
impl<O: IsA<ListBox> + IsA<glib::object::Object>> ListBoxExt for O {
//#[cfg(feature = "v3_16")]
//fn bind_model<'a, 'b, P: IsA</*Ignored*/gio::ListModel> + 'a, Q: Into<Option<&'a P>>, R: Into<Option<&'b /*Unimplemented*/ListBoxCreateWidgetFunc>>, S: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, model: Q, create_widget_func: R, user_data: S, user_data_free_func: /*Unknown conversion*//*Unimplemented*/DestroyNotify) {
// unsafe { TODO: call ffi::gtk_list_box_bind_model() }
//}
#[cfg(feature = "v3_10")]
fn drag_highlight_row(&self, row: &ListBoxRow) {
unsafe {
ffi::gtk_list_box_drag_highlight_row(self.to_glib_none().0, row.to_glib_none().0);
}
}
#[cfg(feature = "v3_10")]
fn drag_unhighlight_row(&self) {
unsafe {
ffi::gtk_list_box_drag_unhighlight_row(self.to_glib_none().0);
}
}
#[cfg(feature = "v3_10")]
fn get_activate_on_single_click(&self) -> bool {
unsafe {
from_glib(ffi::gtk_list_box_get_activate_on_single_click(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_10")]
fn get_adjustment(&self) -> Option<Adjustment> {
unsafe {
from_glib_none(ffi::gtk_list_box_get_adjustment(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_10")]
fn get_row_at_index(&self, index_: i32) -> Option<ListBoxRow> {
unsafe {
from_glib_none(ffi::gtk_list_box_get_row_at_index(self.to_glib_none().0, index_))
}
}
#[cfg(feature = "v3_10")]
fn get_row_at_y(&self, y: i32) -> Option<ListBoxRow> {
unsafe {
from_glib_none(ffi::gtk_list_box_get_row_at_y(self.to_glib_none().0, y))
}
}
#[cfg(feature = "v3_10")]
fn get_selected_row(&self) -> Option<ListBoxRow> {
unsafe {
from_glib_none(ffi::gtk_list_box_get_selected_row(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_14")]
fn get_selected_rows(&self) -> Vec<ListBoxRow> {
unsafe {
FromGlibPtrContainer::from_glib_container(ffi::gtk_list_box_get_selected_rows(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_10")]
fn get_selection_mode(&self) -> SelectionMode {
unsafe {
from_glib(ffi::gtk_list_box_get_selection_mode(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_10")]
fn insert<P: IsA<Widget>>(&self, child: &P, position: i32) {
unsafe {
ffi::gtk_list_box_insert(self.to_glib_none().0, child.to_glib_none().0, position);
}
}
#[cfg(feature = "v3_10")]
fn invalidate_filter(&self) {
unsafe {
ffi::gtk_list_box_invalidate_filter(self.to_glib_none().0);
}
}
#[cfg(feature = "v3_10")]
fn invalidate_headers(&self) {
unsafe {
ffi::gtk_list_box_invalidate_headers(self.to_glib_none().0);
}
}
#[cfg(feature = "v3_10")]
fn invalidate_sort(&self) {
unsafe {
ffi::gtk_list_box_invalidate_sort(self.to_glib_none().0);
}
}
#[cfg(feature = "v3_10")]
fn prepend<P: IsA<Widget>>(&self, child: &P) {
unsafe {
ffi::gtk_list_box_prepend(self.to_glib_none().0, child.to_glib_none().0);
}
}
#[cfg(feature = "v3_14")]
fn select_all(&self) {
unsafe {
ffi::gtk_list_box_select_all(self.to_glib_none().0);
}
}
#[cfg(feature = "v3_10")]
fn select_row<'a, P: Into<Option<&'a ListBoxRow>>>(&self, row: P) {
let row = row.into();
let row = row.to_glib_none();
unsafe {
ffi::gtk_list_box_select_row(self.to_glib_none().0, row.0);
}
}
//#[cfg(feature = "v3_14")]
//fn selected_foreach<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, func: /*Unknown conversion*//*Unimplemented*/ListBoxForeachFunc, data: P) {
// unsafe { TODO: call ffi::gtk_list_box_selected_foreach() }
//}
#[cfg(feature = "v3_10")]
fn set_activate_on_single_click(&self, single: bool) {
unsafe {
ffi::gtk_list_box_set_activate_on_single_click(self.to_glib_none().0, single.to_glib());
}
}
#[cfg(feature = "v3_10")]
fn set_adjustment<'a, P: Into<Option<&'a Adjustment>>>(&self, adjustment: P) {
let adjustment = adjustment.into();
let adjustment = adjustment.to_glib_none();
unsafe {
ffi::gtk_list_box_set_adjustment(self.to_glib_none().0, adjustment.0);
}
}
//#[cfg(feature = "v3_10")]
//fn set_filter_func<'a, P: Into<Option<&'a /*Unimplemented*/ListBoxFilterFunc>>, Q: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, filter_func: P, user_data: Q, destroy: /*Unknown conversion*//*Unimplemented*/DestroyNotify) {
// unsafe { TODO: call ffi::gtk_list_box_set_filter_func() }
//}
//#[cfg(feature = "v3_10")]
//fn set_header_func<'a, P: Into<Option<&'a /*Unimplemented*/ListBoxUpdateHeaderFunc>>, Q: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, update_header: P, user_data: Q, destroy: /*Unknown conversion*//*Unimplemented*/DestroyNotify) {
// unsafe { TODO: call ffi::gtk_list_box_set_header_func() }
//}
#[cfg(feature = "v3_10")]
fn set_placeholder<'a, P: IsA<Widget> + 'a, Q: Into<Option<&'a P>>>(&self, placeholder: Q) {
let placeholder = placeholder.into();
let placeholder = placeholder.to_glib_none();
unsafe {
ffi::gtk_list_box_set_placeholder(self.to_glib_none().0, placeholder.0);
}
}
#[cfg(feature = "v3_10")]
fn set_selection_mode(&self, mode: SelectionMode) {
unsafe {
ffi::gtk_list_box_set_selection_mode(self.to_glib_none().0, mode.to_glib());
}
}
//#[cfg(feature = "v3_10")]
//fn set_sort_func<'a, P: Into<Option<&'a /*Unimplemented*/ListBoxSortFunc>>, Q: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, sort_func: P, user_data: Q, destroy: /*Unknown conversion*//*Unimplemented*/DestroyNotify) {
// unsafe { TODO: call ffi::gtk_list_box_set_sort_func() }
//}
#[cfg(feature = "v3_14")]
fn unselect_all(&self) {
unsafe {
ffi::gtk_list_box_unselect_all(self.to_glib_none().0);
}
}
#[cfg(feature = "v3_14")]
fn unselect_row(&self, row: &ListBoxRow) {
unsafe {
ffi::gtk_list_box_unselect_row(self.to_glib_none().0, row.to_glib_none().0);
}
}
fn get_property_activate_on_single_click(&self) -> bool {
let mut value = Value::from(&false);
unsafe {
gobject_ffi::g_object_get_property(self.to_glib_none().0, "activate-on-single-click".to_glib_none().0, value.to_glib_none_mut().0);
}
value.get().unwrap()
}
fn set_property_activate_on_single_click(&self, activate_on_single_click: bool) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0, "activate-on-single-click".to_glib_none().0, Value::from(&activate_on_single_click).to_glib_none().0);
}
}
fn get_property_selection_mode(&self) -> SelectionMode {
let mut value = Value::from(&0);
unsafe {
gobject_ffi::g_object_get_property(self.to_glib_none().0, "selection-mode".to_glib_none().0, value.to_glib_none_mut().0);
from_glib(transmute(value.get::<i32>().unwrap()))
}
}
fn set_property_selection_mode(&self, selection_mode: SelectionMode) {
let selection_mode = selection_mode.to_glib() as i32;
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0, "selection-mode".to_glib_none().0, Value::from(&selection_mode).to_glib_none().0);
}
}
fn connect_activate_cursor_row<F: Fn(&Self) + 'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "activate-cursor-row",
transmute(activate_cursor_row_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
<|fim▁hole|> connect(self.to_glib_none().0, "move-cursor",
transmute(move_cursor_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
#[cfg(feature = "v3_10")]
fn connect_row_activated<F: Fn(&Self, &ListBoxRow) + 'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&Self, &ListBoxRow) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "row-activated",
transmute(row_activated_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
#[cfg(feature = "v3_10")]
fn connect_row_selected<F: Fn(&Self, &Option<ListBoxRow>) + 'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&Self, &Option<ListBoxRow>) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "row-selected",
transmute(row_selected_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
#[cfg(feature = "v3_14")]
fn connect_select_all<F: Fn(&Self) + 'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "select-all",
transmute(select_all_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
#[cfg(feature = "v3_14")]
fn connect_selected_rows_changed<F: Fn(&Self) + 'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "selected-rows-changed",
transmute(selected_rows_changed_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
fn connect_toggle_cursor_row<F: Fn(&Self) + 'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "toggle-cursor-row",
transmute(toggle_cursor_row_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
#[cfg(feature = "v3_14")]
fn connect_unselect_all<F: Fn(&Self) + 'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "unselect-all",
transmute(unselect_all_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
}
unsafe extern "C" fn activate_cursor_row_trampoline<P>(this: *mut ffi::GtkListBox, f: glib_ffi::gpointer)
where P: IsA<ListBox> {
callback_guard!();
let f: &Box_<Fn(&P) + 'static> = transmute(f);
f(&ListBox::from_glib_none(this).downcast_unchecked())
}
unsafe extern "C" fn move_cursor_trampoline<P>(this: *mut ffi::GtkListBox, object: ffi::GtkMovementStep, p0: libc::c_int, f: glib_ffi::gpointer)
where P: IsA<ListBox> {
callback_guard!();
let f: &Box_<Fn(&P, MovementStep, i32) + 'static> = transmute(f);
f(&ListBox::from_glib_none(this).downcast_unchecked(), from_glib(object), p0)
}
#[cfg(feature = "v3_10")]
unsafe extern "C" fn row_activated_trampoline<P>(this: *mut ffi::GtkListBox, row: *mut ffi::GtkListBoxRow, f: glib_ffi::gpointer)
where P: IsA<ListBox> {
callback_guard!();
let f: &Box_<Fn(&P, &ListBoxRow) + 'static> = transmute(f);
f(&ListBox::from_glib_none(this).downcast_unchecked(), &from_glib_none(row))
}
#[cfg(feature = "v3_10")]
unsafe extern "C" fn row_selected_trampoline<P>(this: *mut ffi::GtkListBox, row: *mut ffi::GtkListBoxRow, f: glib_ffi::gpointer)
where P: IsA<ListBox> {
callback_guard!();
let f: &Box_<Fn(&P, &Option<ListBoxRow>) + 'static> = transmute(f);
f(&ListBox::from_glib_none(this).downcast_unchecked(), &from_glib_none(row))
}
#[cfg(feature = "v3_14")]
unsafe extern "C" fn select_all_trampoline<P>(this: *mut ffi::GtkListBox, f: glib_ffi::gpointer)
where P: IsA<ListBox> {
callback_guard!();
let f: &Box_<Fn(&P) + 'static> = transmute(f);
f(&ListBox::from_glib_none(this).downcast_unchecked())
}
#[cfg(feature = "v3_14")]
unsafe extern "C" fn selected_rows_changed_trampoline<P>(this: *mut ffi::GtkListBox, f: glib_ffi::gpointer)
where P: IsA<ListBox> {
callback_guard!();
let f: &Box_<Fn(&P) + 'static> = transmute(f);
f(&ListBox::from_glib_none(this).downcast_unchecked())
}
unsafe extern "C" fn toggle_cursor_row_trampoline<P>(this: *mut ffi::GtkListBox, f: glib_ffi::gpointer)
where P: IsA<ListBox> {
callback_guard!();
let f: &Box_<Fn(&P) + 'static> = transmute(f);
f(&ListBox::from_glib_none(this).downcast_unchecked())
}
#[cfg(feature = "v3_14")]
unsafe extern "C" fn unselect_all_trampoline<P>(this: *mut ffi::GtkListBox, f: glib_ffi::gpointer)
where P: IsA<ListBox> {
callback_guard!();
let f: &Box_<Fn(&P) + 'static> = transmute(f);
f(&ListBox::from_glib_none(this).downcast_unchecked())
}<|fim▁end|> | fn connect_move_cursor<F: Fn(&Self, MovementStep, i32) + 'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&Self, MovementStep, i32) + 'static>> = Box_::new(Box_::new(f)); |
<|file_name|>filter.rs<|end_file_name|><|fim▁begin|>use crate::fmt;
use crate::iter::{adapters::SourceIter, FusedIterator, InPlaceIterable};
use crate::ops::Try;
/// An iterator that filters the elements of `iter` with `predicate`.
///
/// This `struct` is created by the [`filter`] method on [`Iterator`]. See its
/// documentation for more.
///
/// [`filter`]: Iterator::filter
/// [`Iterator`]: trait.Iterator.html
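///
/// Basic usage (an illustrative sketch):
///
/// ```
/// let evens: Vec<i32> = (1..7).filter(|&x| x % 2 == 0).collect();
/// assert_eq!(evens, vec![2, 4, 6]);
/// ```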
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct Filter<I, P> {
// Used for `SplitWhitespace` and `SplitAsciiWhitespace` `as_str` methods
pub(crate) iter: I,
predicate: P,
}
impl<I, P> Filter<I, P> {
pub(in crate::iter) fn new(iter: I, predicate: P) -> Filter<I, P> {
Filter { iter, predicate }
}
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<I: fmt::Debug, P> fmt::Debug for Filter<I, P> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Filter").field("iter", &self.iter).finish()
}
}
fn filter_fold<T, Acc>(
mut predicate: impl FnMut(&T) -> bool,
mut fold: impl FnMut(Acc, T) -> Acc,
) -> impl FnMut(Acc, T) -> Acc {
move |acc, item| if predicate(&item) { fold(acc, item) } else { acc }
}
fn filter_try_fold<'a, T, Acc, R: Try<Output = Acc>>(
predicate: &'a mut impl FnMut(&T) -> bool,
mut fold: impl FnMut(Acc, T) -> R + 'a,
) -> impl FnMut(Acc, T) -> R + 'a {
move |acc, item| if predicate(&item) { fold(acc, item) } else { try { acc } }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator, P> Iterator for Filter<I, P>
where
P: FnMut(&I::Item) -> bool,
{
type Item = I::Item;
#[inline]
fn next(&mut self) -> Option<I::Item> {
self.iter.find(&mut self.predicate)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (_, upper) = self.iter.size_hint();
(0, upper) // can't know a lower bound, due to the predicate
}
// this special case allows the compiler to make `.filter(_).count()`
// branchless. Barring perfect branch prediction (which is unattainable in
// the general case), this will be much faster in >90% of cases (containing
// virtually all real workloads) and only a tiny bit slower in the rest.
//
// Having this specialization thus allows us to write `.filter(p).count()`
// where we would otherwise write `.map(|x| p(x) as usize).sum()`, which is
// less readable and also less backwards-compatible to Rust before 1.10.
//
// Using the branchless version will also simplify the LLVM byte code, thus
// leaving more budget for LLVM optimizations.
#[inline]
fn count(self) -> usize {
#[inline]
fn to_usize<T>(mut predicate: impl FnMut(&T) -> bool) -> impl FnMut(T) -> usize {
move |x| predicate(&x) as usize
}
self.iter.map(to_usize(self.predicate)).sum()
}
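// An illustrative consequence of the specialization above (a sketch):
// `(0..10).filter(|&x| x % 3 == 0).count()` evaluates to 4 (0, 3, 6, 9)
// by summing 0/1 flags instead of branching per element.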
#[inline]
fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R
where
Self: Sized,
Fold: FnMut(Acc, Self::Item) -> R,
R: Try<Output = Acc>,
{
self.iter.try_fold(init, filter_try_fold(&mut self.predicate, fold))
}
#[inline]
fn fold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc
where
Fold: FnMut(Acc, Self::Item) -> Acc,
{
self.iter.fold(init, filter_fold(self.predicate, fold))
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: DoubleEndedIterator, P> DoubleEndedIterator for Filter<I, P>
where
P: FnMut(&I::Item) -> bool,<|fim▁hole|> }
#[inline]
fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R
where
Self: Sized,
Fold: FnMut(Acc, Self::Item) -> R,
R: Try<Output = Acc>,
{
self.iter.try_rfold(init, filter_try_fold(&mut self.predicate, fold))
}
#[inline]
fn rfold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc
where
Fold: FnMut(Acc, Self::Item) -> Acc,
{
self.iter.rfold(init, filter_fold(self.predicate, fold))
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<I: FusedIterator, P> FusedIterator for Filter<I, P> where P: FnMut(&I::Item) -> bool {}
#[unstable(issue = "none", feature = "inplace_iteration")]
unsafe impl<S: Iterator, P, I: Iterator> SourceIter for Filter<I, P>
where
P: FnMut(&I::Item) -> bool,
I: SourceIter<Source = S>,
{
type Source = S;
#[inline]
unsafe fn as_inner(&mut self) -> &mut S {
// SAFETY: unsafe function forwarding to unsafe function with the same requirements
unsafe { SourceIter::as_inner(&mut self.iter) }
}
}
#[unstable(issue = "none", feature = "inplace_iteration")]
unsafe impl<I: InPlaceIterable, P> InPlaceIterable for Filter<I, P> where P: FnMut(&I::Item) -> bool {}<|fim▁end|> | {
#[inline]
fn next_back(&mut self) -> Option<I::Item> {
self.iter.rfind(&mut self.predicate) |
<|file_name|>XDwgDirectReader.cpp<|end_file_name|><|fim▁begin|>// XDwgDirectReader.cpp: implementation of the XDwgDirectReader class.
//
//////////////////////////////////////////////////////////////////////
#include "stdafx.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include "atlbase.h"
#include "XDwgDirectReader.h"
#include "db.h"
#include "DwgEntityDumper.h"
#include "ExSystemServices.h"
#include "ExHostAppServices.h"
#include "RxDynamicModule.h"
//////////////////////////////////////////////////////////////////////////
/////////////DwgReaderServices//////////////////////////////////////////////
class DwgReaderServices : public ExSystemServices, public ExHostAppServices
{
protected:
ODRX_USING_HEAP_OPERATORS(ExSystemServices);
};
OdRxObjectImpl<DwgReaderServices> svcs;
ExProtocolExtension theProtocolExtensions;
const CString g_szEntityType = "ENTITY_TYPE";
//GIS data grid size
const double DEFAULT_GIS_GRID_SIZE = 120.0;
//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////
XDWGReader::XDWGReader()
{
//Initialize the DwgDirect library
odInitialize(&svcs);
theProtocolExtensions.initialize();
//Default CAD reading options
m_IsReadPolygon = FALSE;
m_IsLine2Polygon = FALSE;
m_IsBreakBlock = FALSE;
m_IsReadInvisible = FALSE;
m_IsJoinXDataAttrs = FALSE;
m_IsReadBlockPoint = TRUE;
m_IsCreateAnnotation = TRUE;
m_iUnbreakBlockMode = 0;
m_pSpRef = NULL;
m_dAnnoScale = 1;
m_bConvertAngle = TRUE;
m_pProgressBar = NULL;
m_pLogRec = NULL;
InitAOPointers();
m_Regapps.RemoveAll();
m_unExplodeBlocks.RemoveAll();
m_bFinishedCreateFtCls = FALSE;
m_StepNum = 5000;
}
XDWGReader::~XDWGReader()
{
m_unExplodeBlocks.RemoveAll();
theProtocolExtensions.uninitialize();
odUninitialize();
if (m_pLogRec != NULL)
{
delete m_pLogRec;
}
}
//////////////////////////////////////////////////////////////////////////
//Brief description : delete a feature class that already exists
//Input parameters  :
//Return value      :
//Change log        :
//////////////////////////////////////////////////////////////////////////
void XDWGReader::CheckDeleteFtCls(IFeatureWorkspace* pFtWS, CString sFtClsName)
{
if (pFtWS == NULL) return;
IFeatureClass* pFtCls = NULL;
pFtWS->OpenFeatureClass(CComBSTR(sFtClsName), &pFtCls);
if (pFtCls != NULL)
{
IDatasetPtr pDs = pFtCls;
if (pDs != NULL)
{
pDs->Delete();
}
}
}
/********************************************************************
Brief description : preparation before batch reading
Input parameters  :
Return value      :
Change log        :
*********************************************************************/
BOOL XDWGReader::PrepareReadDwg(IWorkspace* pTargetWS, IDataset* pTargetDataset, ISpatialReference* pSpRef)
{
try
{
m_pTargetWS = pTargetWS;
//Initialize pointers
//InitAOPointers();
//Number of entities that could not be read
m_lUnReadEntityNum = 0;
//////////////////////////////////////////////////////////////////////////
IFeatureDatasetPtr pFeatDataset(pTargetDataset);
if (pSpRef == NULL)
{
ISpatialReferencePtr pUnknownSpRef(CLSID_UnknownCoordinateSystem);
m_pSpRef = pUnknownSpRef.Detach();
m_pSpRef->SetDomain(0.0, 1000000000, 0.0, 1000000000);
}
else
{
m_pSpRef = pSpRef;
}
//////////////////////////////////////////////////////////////////////////
//Use high precision; otherwise tables or FeatureClasses cannot be created
IControlPrecision2Ptr pControlPrecision(m_pSpRef);
if (pControlPrecision != NULL)
{
pControlPrecision->put_IsHighPrecision(VARIANT_TRUE);
}
//Set the spatial reference resolution values
ISpatialReferenceResolutionPtr spatialReferenceResolution = m_pSpRef;
spatialReferenceResolution->SetDefaultMResolution();
spatialReferenceResolution->SetDefaultZResolution();
spatialReferenceResolution->SetDefaultXYResolution();
//Set the spatial reference tolerance values
ISpatialReferenceTolerancePtr spatialReferenceTolerance = m_pSpRef;
spatialReferenceTolerance->SetDefaultMTolerance();
spatialReferenceTolerance->SetDefaultZTolerance();
spatialReferenceTolerance->SetDefaultXYTolerance();
m_bFinishedCreateFtCls = FALSE;
return TRUE;
}
catch (...)
{
WriteLog("³õʼ»¯Òì³£,Çë¼ì²é¹¤×÷¿Õ¼äºÍ¿Õ¼ä²Î¿¼ÊÇ·ñÕýÈ·.");
return FALSE;
}
}
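//Illustrative call sequence (a sketch, not part of the original source; the
//workspace and spatial reference objects are assumed to be created elsewhere):
// XDWGReader reader;
// reader.PutLogFilePath("C:\\temp\\dwg2gdb.log");
// if (reader.PrepareReadDwg(pWorkspace, NULL, pSpatialRef))
// {
//     reader.ReadFile("C:\\data\\sheet1.dwg"); //may be repeated per DWG file
//     reader.CommitReadDwg();
// }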
//////////////////////////////////////////////////////////////////////////
//Brief description : create the target feature classes
//Input parameters  :
//Return value      :
//Change log        :
//////////////////////////////////////////////////////////////////////////
BOOL XDWGReader::CreateTargetAllFeatureClass()
{
try
{
IFeatureWorkspacePtr pFtWS(m_pTargetWS);
if (pFtWS == NULL)
return FALSE;
HRESULT hr;
CString sInfoText;
//Create the system table structures
IFieldsPtr ipFieldsPoint = 0;
IFieldsPtr ipFieldsLine = 0;
IFieldsPtr ipFieldsPolygon = 0;
IFieldsPtr ipFieldsText = 0;
IFieldsPtr ipFieldsAnnotation = 0;
//Generate the fields for the ordinary point class
CreateDwgPointFields(m_pSpRef, &ipFieldsPoint);
//Generate the fields for the text point class
CreateDwgTextPointFields(m_pSpRef, &ipFieldsText);
//Generate the fields for the line feature class
CreateDwgLineFields(m_pSpRef, &ipFieldsLine);
//Generate the fields for the polygon feature class
CreateDwgPolygonFields(m_pSpRef, &ipFieldsPolygon);
//Generate the fields for the annotation layer
CreateDwgAnnotationFields(m_pSpRef, &ipFieldsAnnotation);
//////////////////////////////////////////////////////////////////////////
//Add extended attribute fields
if (m_IsJoinXDataAttrs && m_Regapps.GetCount() > 0)
{
IFieldsEditPtr ipEditFieldsPoint = ipFieldsPoint;
IFieldsEditPtr ipEditFieldsLine = ipFieldsLine;
IFieldsEditPtr ipEditFieldsPolygon = ipFieldsPolygon;
IFieldsEditPtr ipEditFieldsText = ipFieldsText;
IFieldsEditPtr ipEditFieldsAnnotation = ipFieldsAnnotation;
CString sRegappName;
for (int i = 0; i < m_Regapps.GetCount(); i++)
{
//Create an extended attribute field
IFieldPtr ipField(CLSID_Field);
IFieldEditPtr ipFieldEdit = ipField;
sRegappName = m_Regapps.GetAt(m_Regapps.FindIndex(i));
CComBSTR bsStr = sRegappName;
ipFieldEdit->put_Name(bsStr);
ipFieldEdit->put_AliasName(bsStr);
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(2000);
long lFldIndex = 0;
ipEditFieldsPoint->FindField(bsStr, &lFldIndex);
if (lFldIndex == -1)
{
ipEditFieldsPoint->AddField(ipField);
}
ipEditFieldsLine->FindField(bsStr, &lFldIndex);
if (lFldIndex == -1)
{
ipEditFieldsLine->AddField(ipField);
}
ipEditFieldsPolygon->FindField(bsStr, &lFldIndex);
if (lFldIndex == -1)
{
ipEditFieldsPolygon->AddField(ipField);
}
ipEditFieldsText->FindField(bsStr, &lFldIndex);
if (lFldIndex == -1)
{
ipEditFieldsText->AddField(ipField);
}
ipEditFieldsAnnotation->FindField(bsStr, &lFldIndex);
if (lFldIndex == -1)
{
ipEditFieldsAnnotation->AddField(ipField);
}
}
}
//Delete existing layers first if present
CheckDeleteFtCls(pFtWS, "Point");
CheckDeleteFtCls(pFtWS, "TextPoint");
CheckDeleteFtCls(pFtWS, "Line");
CheckDeleteFtCls(pFtWS, "Polygon");
CheckDeleteFtCls(pFtWS, "Annotation");
CheckDeleteFtCls(pFtWS, "ExtendTable");
//Create the point layer
hr = CreateDatasetFeatureClass(pFtWS, NULL, ipFieldsPoint, CComBSTR("Point"), esriFTSimple, m_pFeatClassPoint);
if (m_pFeatClassPoint != NULL)
{
hr = m_pFeatClassPoint->Insert(VARIANT_TRUE, &m_pPointFeatureCursor);
if (FAILED(hr))
{
sInfoText.Format("´´½¨µãFeatureCursorʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
hr = m_pFeatClassPoint->CreateFeatureBuffer(&m_pPointFeatureBuffer);
if (FAILED(hr))
{
sInfoText.Format("´´½¨µãFeautureBufferʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
}
else
{
sInfoText.Format("´´½¨PointÒªËØÀàʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
return FALSE;
}
//Create the line feature class
hr = CreateDatasetFeatureClass(pFtWS, NULL, ipFieldsLine, CComBSTR("Line"), esriFTSimple, m_pFeatClassLine);
if (m_pFeatClassLine != NULL)
{
hr = m_pFeatClassLine->Insert(VARIANT_TRUE, &m_pLineFeatureCursor);
if (FAILED(hr))
{
sInfoText.Format("´´½¨ÏßFeatureCursorʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
hr = m_pFeatClassLine->CreateFeatureBuffer(&m_pLineFeatureBuffer);
if (FAILED(hr))
{
sInfoText.Format("´´½¨ÏßFeautureBufferʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
}
else
{
sInfoText.Format("´´½¨LineÒªËØÀàʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
return FALSE;
}
if (m_IsReadPolygon || m_IsLine2Polygon)
{
//Create the polygon feature class
hr = CreateDatasetFeatureClass(pFtWS, NULL, ipFieldsPolygon, CComBSTR("Polygon"), esriFTSimple, m_pFeatClassPolygon);
if (m_pFeatClassPolygon != NULL)
{
hr = m_pFeatClassPolygon->Insert(VARIANT_TRUE, &m_pPolygonFeatureCursor);
if (FAILED(hr))
{
sInfoText.Format("´´½¨ÃæFeatureCursorʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
hr = m_pFeatClassPolygon->CreateFeatureBuffer(&m_pPolygonFeatureBuffer);
if (FAILED(hr))
{
sInfoText.Format("´´½¨ÃæFeautureBufferʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
}
else
{
sInfoText.Format("´´½¨PolygonÒªËØÀàʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
return FALSE;
}
}
//ArcGIS annotation layer
if (m_IsCreateAnnotation)
{
m_pAnnoFtCls = CreateAnnoFtCls(m_pTargetWS, "Annotation", ipFieldsAnnotation);
if (m_pAnnoFtCls != NULL)
{
//Set the real-world reference scale
IUnknownPtr pUnk;
m_pAnnoFtCls->get_Extension(&pUnk);
IAnnoClassAdminPtr pAnnoClassAdmin = pUnk;
if (pAnnoClassAdmin != NULL)
{
hr = pAnnoClassAdmin->put_ReferenceScale(m_dAnnoScale);
hr = pAnnoClassAdmin->UpdateProperties();
}
hr = m_pAnnoFtCls->Insert(VARIANT_TRUE, &m_pAnnoFeatureCursor);
if (FAILED(hr))
{
sInfoText.Format("´´½¨×¢¼ÇFeatureCursorʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
hr = m_pAnnoFtCls->CreateFeatureBuffer(&m_pAnnoFeatureBuffer);
if (FAILED(hr))
{
sInfoText.Format("´´½¨×¢¼ÇFeautureBufferʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
}
else
{
sInfoText.Format("´´½¨AnnotationÒªËØÀàʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
return FALSE;
}
//Create the font for the annotation layer
IFontDispPtr pFont(CLSID_StdFont);
IFontPtr fnt = pFont;
fnt->put_Name(CComBSTR("ËÎÌå"));
CY cy;
cy.int64 = 9;
fnt->put_Size(cy);
m_pAnnoTextFont = pFont.Detach();
}
else
{
//Text points
hr = CreateDatasetFeatureClass(pFtWS, NULL, ipFieldsText, CComBSTR("TextPoint"), esriFTSimple, m_pFeatClassText);
if (m_pFeatClassText != NULL)
{
hr = m_pFeatClassText->Insert(VARIANT_TRUE, &m_pTextFeatureCursor);
if (FAILED(hr))
{
sInfoText.Format("´´½¨Îı¾µãFeatureCursorʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
hr = m_pFeatClassText->CreateFeatureBuffer(&m_pTextFeatureBuffer);
if (FAILED(hr))
{
sInfoText.Format("´´½¨Îı¾FeautureBufferʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
}
else
{
sInfoText.Format("´´½¨TextÒªËØÀàʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
return FALSE;
}
}
//Extended attribute table
hr = CreateExtendTable(pFtWS, CComBSTR("ExtendTable"), &m_pExtendTable);
if (m_pExtendTable != NULL)
{
hr = m_pExtendTable->Insert(VARIANT_TRUE, &m_pExtentTableRowCursor);
if (FAILED(hr))
{
sInfoText.Format("´´½¨TableBufferʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
hr = m_pExtendTable->CreateRowBuffer(&m_pExtentTableRowBuffer);
if (FAILED(hr))
{
sInfoText.Format("´´½¨TableCursorʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
}
}
else
{
sInfoText.Format("´´½¨ExtendTableʧ°Ü:%s", CatchErrorInfo());
WriteLog(sInfoText);
return FALSE;
}
m_bFinishedCreateFtCls = TRUE;
return TRUE;
}
catch (...)
{
return FALSE;
}
}
//////////////////////////////////////////////////////////////////////////
//Brief description : read CAD files one at a time
//Input parameters  :
//Return value      :
//Change log        :
//////////////////////////////////////////////////////////////////////////
BOOL XDWGReader::ReadFile(LPCTSTR lpdwgFilename)
{
try
{
//Generate the target GDB layers
if (!m_bFinishedCreateFtCls)
{
if (!CreateTargetAllFeatureClass())
{
WriteLog("´´½¨Ä¿±êÒªËØÀà³öÏÖÒì³££¬ÎÞ·¨½øÐиñʽת»»¡£");
return FALSE;
}
}
//Clear the list of layers that are not read
m_UnReadLayers.RemoveAll();
//Open the CAD file and read it
//Get the DWG sheet name and the log file name
CString szDatasetName ;
CString szLogFileName;
int index;
CString sFileName = lpdwgFilename;
sFileName = sFileName.Mid(sFileName.ReverseFind('\\') + 1);
index = ((CString) lpdwgFilename).ReverseFind('\\');
int ilength = ((CString) lpdwgFilename).GetLength();
szDatasetName = CString(lpdwgFilename).Right(ilength - 1 - index);
index = szDatasetName.ReverseFind('.');
szDatasetName = szDatasetName.Left(index);
m_strDwgName = szDatasetName;
// Record the processing start time
CTime tStartTime = CTime::GetCurrentTime();
CString sInfoText;
sInfoText.Format("¿ªÊ¼¶Á %s Îļþ.", lpdwgFilename);
WriteLog(sInfoText);
if (m_pProgressBar != NULL)
{
m_pProgressBar->SetPos(0);
CString sProgressText;
sProgressText.Format("ÕýÔÚ¶ÁÈ¡%s, ÇëÉÔºò...", lpdwgFilename);
m_pProgressBar->SetWindowText(sProgressText);
}
OdDbDatabasePtr pDb;
pDb = svcs.readFile(lpdwgFilename, false, false, Oda::kShareDenyReadWrite);
if (pDb.isNull())
{
WriteLog("DWGÎļþΪ¿Õ!");
}
// Get the extent from the dwg file
sInfoText.Format("Sheet extent: min X:%f, max X:%f, min Y:%f, max Y:%f \n", 0.9 * pDb->getEXTMIN().x, 1.1 * pDb->getEXTMAX().x, 0.9 * pDb->getEXTMIN().y, 1.1 * pDb->getEXTMAX().y);
WriteLog(sInfoText);
//Read the CAD file
ReadBlock(pDb);
pDb.release();
//Record the completion time
CTime tEndTime = CTime::GetCurrentTime();
CTimeSpan span = tEndTime - tStartTime;
sInfoText.Format("%sÎļþת»»Íê³É!¹²ºÄʱ%dʱ%d·Ö%dÃë.", lpdwgFilename, span.GetHours(), span.GetMinutes(), span.GetSeconds());
WriteLog(sInfoText);
WriteLog("==============================================================");
return TRUE;
}
catch (...)
{
CString sErr;
sErr.Format("%sÎļþ²»´æÔÚ»òÕý´¦ÓÚ´ò¿ª×´Ì¬£¬ÎÞ·¨½øÐÐÊý¾Ý¶ÁÈ¡£¬Çë¼ì²é¡£", lpdwgFilename);
WriteLog(sErr);
return FALSE;
}
}
/********************************************************************
Brief description : set the log file path
Input parameters  :
Return value      :
Date              : 2008/09/27, Beijing.
Author            : Zong Liang <[email protected]>
Change log        :
*********************************************************************/
void XDWGReader::PutLogFilePath(CString sLogFile)
{
m_pLogRec = new CLogRecorder(sLogFile);
m_sLogFilePath = sLogFile;
}
//Write to the log file
void XDWGReader::WriteLog(CString sLog)
{
if (m_pLogRec == NULL)
{
return;
}
if (!sLog.IsEmpty())
{
m_pLogRec->WriteLog(sLog);
}
}
//Finish the DWG reading work
BOOL XDWGReader::CommitReadDwg()
{
//Release the objects that were used
ReleaseAOs();
if (m_pLogRec != NULL)
{
m_pLogRec->CloseFile();
}
return TRUE;
}
void XDWGReader::ReadHeader(OdDbDatabase* pDb)
{
OdString sName = pDb->getFilename();
CString sInfoText;
sInfoText.Format("Database was loaded from:%s", sName.c_str());
WriteLog(sInfoText);
OdDb::DwgVersion vVer = pDb->originalFileVersion();
sInfoText.Format("File version is: %s", OdDb::DwgVersionToStr(vVer));
WriteLog(sInfoText);
sInfoText.Format("Header Variables: %f,%f", pDb->getLTSCALE(), pDb->getATTMODE());
WriteLog(sInfoText);
OdDbDate d = pDb->getTDCREATE();
short month, day, year, hour, min, sec, msec;
d.getDate(month, day, year);
d.getTime(hour, min, sec, msec);
sInfoText.Format(" TDCREATE: %d-%d-%d,%d:%d:%d", month, day, year, hour, min, sec);
WriteLog(sInfoText);
d = pDb->getTDUPDATE();
d.getDate(month, day, year);
d.getTime(hour, min, sec, msec);
sInfoText.Format(" TDCREATE: %d-%d-%d,%d:%d:%d", month, day, year, hour, min, sec);
WriteLog(sInfoText);
}
void XDWGReader::ReadSymbolTable(OdDbObjectId tableId)
{
OdDbSymbolTablePtr pTable = tableId.safeOpenObject();
CString sInfoText;
sInfoText.Format("±íÃû:%s", pTable->isA()->name());
WriteLog(sInfoText);
OdDbSymbolTableIteratorPtr pIter = pTable->newIterator();
for (pIter->start(); !pIter->done(); pIter->step())
{
OdDbSymbolTableRecordPtr pTableRec = pIter->getRecordId().safeOpenObject();
CString TableRecName;
TableRecName.Format("%s", pTableRec->getName().c_str());
TableRecName.MakeUpper();
sInfoText.Format(" %s<%s>", TableRecName, pTableRec->isA()->name());
WriteLog(sInfoText);
}
}
void XDWGReader::ReadLayers(OdDbDatabase* pDb)
{
OdDbLayerTablePtr pLayers = pDb->getLayerTableId().safeOpenObject();
CString sInfoText;
sInfoText.Format("²ãÃû:%s", pLayers->desc()->name());
WriteLog(sInfoText);
OdDbSymbolTableIteratorPtr pIter = pLayers->newIterator();
for (pIter->start(); !pIter->done(); pIter->step())
{
OdDbLayerTableRecordPtr pLayer = pIter->getRecordId().safeOpenObject();
CString LayerName;
LayerName.Format("%s", pLayer->desc()->name());
LayerName.MakeUpper();
sInfoText.Format(" %s<%s>,layercolor:%d,%s,%s,%s,%s", pLayer->getName().c_str(), LayerName, pLayer->colorIndex(), pLayer->isOff() ? "Off" : "On", pLayer->isLocked() ? "Locked" : "Unlocked", pLayer->isFrozen() ? "Frozen" : "UnFrozen", pLayer->isDependent() ? "Dep. on XRef" : "Not dep. on XRef");
WriteLog(sInfoText);
}
}
/************************************************************************
Brief description : read the DWG extended attributes and write them into the extended attribute table
Input parameters  :
Return value      :
Change log        :
************************************************************************/
void XDWGReader::ReadExtendAttribs(OdResBuf* xIter, CString sEntityHandle)
{
if (xIter == 0 || m_pExtendTable == NULL)
return;
CMapStringToPtr mapExtraRes; //Holds every application and its extended attributes (app name + CStringList*)
//Registered Application Name
CString sAppName;
CString sExtendValue;
//CStringList lstExtendValues; //All extended attribute values, separated by []
OdResBuf* xIterLoop = xIter;
for (; xIterLoop != 0; xIterLoop = xIterLoop->next())
{
int code = xIterLoop->restype();
switch (OdDxfCode::_getType(code))
{
case OdDxfCode::Name:
case OdDxfCode::String:
sExtendValue.Format("%s", xIterLoop->getString().c_str());
break;
case OdDxfCode::Bool:
sExtendValue.Format("%d", xIterLoop->getBool());
break;
case OdDxfCode::Integer8:
sExtendValue.Format("%d", xIterLoop->getInt8());
break;
case OdDxfCode::Integer16:
sExtendValue.Format("%d", xIterLoop->getInt16());
break;
case OdDxfCode::Integer32:
sExtendValue.Format("%d", xIterLoop->getInt32());
break;
case OdDxfCode::Double:
sExtendValue.Format("%f", xIterLoop->getDouble());
break;
case OdDxfCode::Angle:
sExtendValue.Format("%f", xIterLoop->getDouble());
break;
case OdDxfCode::Point:
{
OdGePoint3d p = xIterLoop->getPoint3d();
sExtendValue.Format("%f,%f,%f", p.x, p.y, p.z);
}
break;
case OdDxfCode::BinaryChunk:
sExtendValue = "<Binary Data>";
break;
case OdDxfCode::Handle:
case OdDxfCode::LayerName:
sExtendValue.Format("%s", xIterLoop->getString().c_str());
break;
case OdDxfCode::ObjectId:
case OdDxfCode::SoftPointerId:
case OdDxfCode::HardPointerId:
case OdDxfCode::SoftOwnershipId:
case OdDxfCode::HardOwnershipId:
{
OdDbHandle h = xIterLoop->getHandle();
sExtendValue.Format("%s", h.ascii());
}
break;
case OdDxfCode::Unknown:
default:
sExtendValue = "Unknown";
break;
}
//Registered Application Name
if (code == OdResBuf::kDxfRegAppName)
{
sAppName = sExtendValue;
//Create the StringList corresponding to this application
CStringList* pLstExtra = new CStringList();
mapExtraRes.SetAt(sAppName, pLstExtra);
}
else if (code == OdResBuf::kDxfXdAsciiString || code == OdResBuf::kDxfXdReal)
{
void* rValue;
if (mapExtraRes.Lookup(sAppName, rValue))
{
CStringList* pLstExtra = (CStringList*)rValue;
//Save into the list corresponding to this AppName
pLstExtra->AddTail(sExtendValue);
}
}
}
POSITION mapPos = mapExtraRes.GetStartPosition();
while (mapPos)
{
CString sAppName;
void* rValue;
mapExtraRes.GetNextAssoc(mapPos, sAppName, rValue);
CStringList* pList = (CStringList*) rValue;
HRESULT hr;
long lFieldIndex;
CComBSTR bsStr;
CComVariant vtVal;
//Build the extended attribute string
POSITION pos = pList->GetHeadPosition();
if (pos != NULL)
{
CString sAllValues = "[" + pList->GetNext(pos) + "]";
while (pos != NULL)
{
sAllValues = sAllValues + "[" + pList->GetNext(pos) + "]";
}
//Add Extend data to Extend Table
bsStr = "Handle";
m_pExtendTable->FindField(bsStr, &lFieldIndex);
vtVal = sEntityHandle;
m_pExtentTableRowBuffer->put_Value(lFieldIndex, vtVal);
bsStr = "BaseName";
m_pExtendTable->FindField(bsStr, &lFieldIndex);
vtVal = m_strDwgName;
m_pExtentTableRowBuffer->put_Value(lFieldIndex, vtVal);
bsStr = "XDataName";
m_pExtendTable->FindField(bsStr, &lFieldIndex);
sAppName.MakeUpper();
vtVal = sAppName;
m_pExtentTableRowBuffer->put_Value(lFieldIndex, vtVal);
bsStr = "XDataValue";
m_pExtendTable->FindField(bsStr, &lFieldIndex);
vtVal = sAllValues;
m_pExtentTableRowBuffer->put_Value(lFieldIndex, vtVal);
hr = m_pExtentTableRowCursor->InsertRow(m_pExtentTableRowBuffer, &m_TableId);
if (FAILED(hr))
{
WriteLog("À©Õ¹ÊôÐÔ¶Áȡʧ°Ü:" + CatchErrorInfo());
}
}
m_pExtentTableRowCursor->Flush();
vtVal.Clear();
bsStr.Empty();
pList->RemoveAll();
delete pList;
}
mapExtraRes.RemoveAll();
}
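//Illustrative result (a sketch): an entity carrying XData such as
// (RegAppName "GIS_APP", string values "ROAD" and "12.5")
//yields one row in ExtendTable:
// Handle = <entity handle>, BaseName = <dwg name>,
// XDataName = "GIS_APP", XDataValue = "[ROAD][12.5]"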
/********************************************************************
Brief description : rename layer names; for the Jinan project points and lines sit on the same layer, so line features are split out into a designated layer
Input parameters  :
Return value      :
Change log        :
*********************************************************************/
/*void XDWGReader::RenameEntityLayerName(CString sDwgOriLayerName, IFeatureBuffer*& pFeatBuffer)
{
if (m_lstRenameLayers.GetCount() <= 0)
{
return;
}
//Reassign the layer: move line features into the corresponding auxiliary line layer
POSITION pos = m_lstRenameLayers.GetHeadPosition();
while (pos != NULL)
{
RenameLayerRecord* pRenameRec = m_lstRenameLayers.GetNext(pos);
if (pRenameRec->sDWG_LAYERNAME_CONTAINS.IsEmpty()||pRenameRec->sNEW_DWG_LAYERNAME.IsEmpty()||pRenameRec->sNEW_LAYERTYPE.IsEmpty())
{
continue;
}
if (pRenameRec->sNEW_LAYERTYPE.CompareNoCase("Line") == 0)
{
CStringList lstKeys;
CString sKeyStr;
CString sLayerNameContains = pRenameRec->sDWG_LAYERNAME_CONTAINS;
int iPos = sLayerNameContains.Find(',');
while (iPos != -1)
{
sKeyStr = sLayerNameContains.Mid(0, iPos);
sLayerNameContains = sLayerNameContains.Mid(iPos + 1);
iPos = sLayerNameContains.Find(',');
lstKeys.AddTail(sKeyStr);
}
sKeyStr = sLayerNameContains;
lstKeys.AddTail(sKeyStr);
bool bFindKey = true;
for (int ki=0; ki< lstKeys.GetCount(); ki++)
{
sKeyStr = lstKeys.GetAt(lstKeys.FindIndex(ki));
if (sDwgOriLayerName.Find(sKeyStr) == -1)
{
bFindKey = false;
break;
}
}
//If the name contains all the key values, rename it
if (bFindKey)
{
AddAttributes("Layer", pRenameRec->sNEW_DWG_LAYERNAME, pFeatBuffer);
break;
}
}
}
}
*/
/********************************************************************
Brief description : insert an annotation feature
Input parameters  :
Return value      :
Change log        :
*********************************************************************/
void XDWGReader::InsertAnnoFeature(OdRxObject* pEnt)
{
HRESULT hr;
OdDbEntityPtr pOdDbEnt = pEnt;
if (pOdDbEnt.isNull()) return;
CString sEntType = pOdDbEnt->isA()->name();
if (sEntType.Compare("AcDbMText") == 0 || sEntType.Compare("AcDbText") == 0 || sEntType.Compare("AcDbAttribute") == 0)
{
// Add attributes
AddBaseAttributes(pOdDbEnt, "Annotation", m_pAnnoFeatureBuffer);
//CString sTempVal;
CString sText = "";
double dHeight = 0;
double dWeight = 0;
double dAngle = 0;
OdGePoint3d textPos;
//Alignment point
OdGePoint3d alignPoint;
esriTextHorizontalAlignment horizAlign = esriTHALeft;
esriTextVerticalAlignment vertAlign = esriTVABaseline;
CString sTextStyle = "STANDARD";
CString sHeight = "0";
CString sElevation = "0";
CString sThickness = "0";
CString sOblique = "0";
if (sEntType.Compare("AcDbMText") == 0)
{
OdDbMTextPtr pMText = OdDbMTextPtr(pEnt);
//Text content
sText = pMText->contents();
int iPos = sText.ReverseFind(';');
sText = sText.Mid(iPos + 1);
sText.Replace("{", "");
sText.Replace("}", "");
//Text style
OdDbSymbolTableRecordPtr symbolbRec = OdDbSymbolTableRecordPtr(pMText->textStyle().safeOpenObject());
if (!symbolbRec.isNull())
{
sTextStyle.Format("%s", symbolbRec->getName());
}
//Height
sHeight.Format("%f", pMText->textHeight());
//Elevation value
sElevation.Format("%f", pMText->location().z);
////Parameters needed to build the annotation////
//Angle
dAngle = pMText->rotation();
//Height and width
dHeight = pMText->textHeight();
dWeight = pMText->width();
//Position point
textPos = pMText->location();
//Set the alignment mode
if (pMText->horizontalMode() == OdDb::kTextLeft)
{
horizAlign = esriTHALeft;
}
else if (pMText->horizontalMode() == OdDb::kTextCenter)
{
horizAlign = esriTHACenter;
}
else if (pMText->horizontalMode() == OdDb::kTextRight)
{
horizAlign = esriTHARight;
}
else if (pMText->horizontalMode() == OdDb::kTextFit)
{
horizAlign = esriTHAFull;
}
if (pMText->verticalMode() == OdDb::kTextBase)
{
vertAlign = esriTVABaseline;
}
else if (pMText->verticalMode() == OdDb::kTextBottom)
{
vertAlign = esriTVABottom;
}
else if (pMText->verticalMode() == OdDb::kTextTop)
{
vertAlign = esriTVATop;
}
else if (pMText->verticalMode() == OdDb::kTextVertMid)
{
vertAlign = esriTVACenter;
}
}
else if (sEntType.Compare("AcDbText") == 0 || sEntType.Compare("AcDbAttribute") == 0)
{
OdDbTextPtr pText = OdDbTextPtr(pEnt);
//Text content
sText = pText->textString();
//Text style
OdDbSymbolTableRecordPtr symbolbRec = OdDbSymbolTableRecordPtr(pText->textStyle().safeOpenObject());
if (!symbolbRec.isNull())
{
sTextStyle.Format("%s", symbolbRec->getName());
}
//Elevation value
sElevation.Format("%f", pText->position().z);
//Height
sHeight.Format("%f", pText->height());
//Thickness
sThickness.Format("%.f", pText->thickness());
//Oblique angle
sOblique.Format("%f", pText->oblique());
////Parameters needed to build the annotation////
//Angle
dAngle = pText->rotation();
dHeight = pText->height();
dWeight = 0;
textPos = pText->position();
alignPoint = pText->alignmentPoint();
//if (textPos.x <= 0.0001 && textPos.y <= 0.0001) //If there is no alignment point, use the position point
//{
// textPos = pText->position();
//}
CString tempstr;
tempstr.Format("%f", alignPoint.x);
AddAttributes("AlignPtX", tempstr, m_pAnnoFeatureBuffer);
tempstr.Format("%f", alignPoint.y);
AddAttributes("AlignPtY", tempstr, m_pAnnoFeatureBuffer);
//OdGePoint3dArray boundingPoints;
//pText->getBoundingPoints(boundingPoints);
//OdGePoint3d topLeft = boundingPoints[0];
//OdGePoint3d topRight = boundingPoints[1];
//OdGePoint3d bottomLeft = boundingPoints[2];
//OdGePoint3d bottomRight = boundingPoints[3];
//Set the alignment mode
if (pText->horizontalMode() == OdDb::kTextLeft)
{
horizAlign = esriTHALeft;
}
else if (pText->horizontalMode() == OdDb::kTextCenter)
{
horizAlign = esriTHACenter;
}
else if (pText->horizontalMode() == OdDb::kTextRight)
{
horizAlign = esriTHARight;
}
else if (pText->horizontalMode() == OdDb::kTextFit)
{
horizAlign = esriTHAFull;
}
if (pText->verticalMode() == OdDb::kTextBase)
{
vertAlign = esriTVABaseline;
}
else if (pText->verticalMode() == OdDb::kTextBottom)
{
vertAlign = esriTVABottom;
}
else if (pText->verticalMode() == OdDb::kTextTop)
{
vertAlign = esriTVATop;
}
else if (pText->verticalMode() == OdDb::kTextVertMid)
{
vertAlign = esriTVACenter;
}
}
//Set the annotation text style
AddAttributes("TextStyle", sTextStyle, m_pAnnoFeatureBuffer);
AddAttributes("Height", sHeight, m_pAnnoFeatureBuffer);
AddAttributes("Elevation", sElevation, m_pAnnoFeatureBuffer);
AddAttributes("Thickness", sThickness, m_pAnnoFeatureBuffer);
AddAttributes("Oblique", sOblique, m_pAnnoFeatureBuffer);
//Create the Element
ITextElementPtr pTextElement = MakeTextElementByStyle(sText, dAngle, dHeight, textPos.x, textPos.y, m_dAnnoScale, horizAlign, vertAlign);
IElementPtr pElement = pTextElement;
IAnnotationFeaturePtr pTarAnnoFeat = m_pAnnoFeatureBuffer;
hr = pTarAnnoFeat->put_Annotation(pElement);
PutExtendAttribsValue(m_pAnnoFeatureBuffer, OdDbEntityPtr(pEnt)->xData());
CComVariant OID;
hr = m_pAnnoFeatureCursor->InsertFeature(m_pAnnoFeatureBuffer, &OID);
if (FAILED(hr))
{
CString sInfoText;
sInfoText = "Annotation¶ÔÏóдÈëµ½PGDBʧ°Ü." + CatchErrorInfo();
WriteLog(sInfoText);
m_lUnReadEntityNum++;
}
}
}
//////////////////////////////////////////////////////////////////////////
//Brief description : insert a CAD attribute object
//Input parameters  :
//Return value      :
//Change log        :
//////////////////////////////////////////////////////////////////////////
void XDWGReader::InsertDwgAttribFeature(OdRxObject* pEnt)
{
HRESULT hr;
OdDbEntityPtr pOdDbEnt = pEnt;
if (pOdDbEnt.isNull()) return;
CString sEntType = pOdDbEnt->isA()->name();
if (strcmp(sEntType, "AcDbAttributeDefinition") == 0)
{
// Add attributes
AddBaseAttributes(pOdDbEnt, "Annotation", m_pAnnoFeatureBuffer);
//CString sTempVal;
CString sText = "";
double dHeight = 0;
double dWeight = 0;
double dAngle = 0;
OdGePoint3d textPos;
esriTextHorizontalAlignment horizAlign = esriTHALeft;
esriTextVerticalAlignment vertAlign = esriTVABaseline;
CString sTextStyle = "STANDARD";
CString sHeight = "0";
CString sElevation = "0";
CString sThickness = "0";
CString sOblique = "0";
OdDbAttributeDefinitionPtr pText = OdDbAttributeDefinitionPtr(pEnt);
//Text content
CString sTag = pText->tag();
CString sPrompt = pText->prompt();
sText = sTag;
//Text style
OdDbSymbolTableRecordPtr symbolbRec = OdDbSymbolTableRecordPtr(pText->textStyle().safeOpenObject());
if (!symbolbRec.isNull())
{
sTextStyle.Format("%s", symbolbRec->getName());
}
//Elevation value
sElevation.Format("%f", pText->position().z);
//Height
sHeight.Format("%f", pText->height());
//Thickness
sThickness.Format("%.f", pText->thickness());
//Oblique angle
sOblique.Format("%f", pText->oblique());
////Parameters needed to build the annotation////
//Angle
dAngle = pText->rotation();
dHeight = pText->height();
dWeight = 0;
textPos = pText->alignmentPoint();
if (textPos.x <= 0.0001 && textPos.y <= 0.0001) //If there is no alignment point, use the position point
{
textPos = pText->position();
}
//Set the alignment mode
if (pText->horizontalMode() == OdDb::kTextLeft)
{
horizAlign = esriTHALeft;
}
else if (pText->horizontalMode() == OdDb::kTextCenter)
{
horizAlign = esriTHACenter;
}
else if (pText->horizontalMode() == OdDb::kTextRight)
{
horizAlign = esriTHARight;
}
else if (pText->horizontalMode() == OdDb::kTextFit)
{
horizAlign = esriTHAFull;
}
if (pText->verticalMode() == OdDb::kTextBase)
{
vertAlign = esriTVABaseline;
}
else if (pText->verticalMode() == OdDb::kTextBottom)
{
vertAlign = esriTVABottom;
}
else if (pText->verticalMode() == OdDb::kTextTop)
{
vertAlign = esriTVATop;
}
else if (pText->verticalMode() == OdDb::kTextVertMid)
{
vertAlign = esriTVACenter;
}
//Set the annotation text style
AddAttributes("TextStyle", sTextStyle, m_pAnnoFeatureBuffer);
AddAttributes("Height", sHeight, m_pAnnoFeatureBuffer);
AddAttributes("Elevation", sElevation, m_pAnnoFeatureBuffer);
AddAttributes("Thickness", sThickness, m_pAnnoFeatureBuffer);
AddAttributes("Oblique", sOblique, m_pAnnoFeatureBuffer);
//Create the Element
ITextElementPtr pTextElement = MakeTextElementByStyle(sText, dAngle, dHeight, textPos.x, textPos.y, m_dAnnoScale, horizAlign, vertAlign);
IElementPtr pElement = pTextElement;
IAnnotationFeaturePtr pTarAnnoFeat = m_pAnnoFeatureBuffer;
hr = pTarAnnoFeat->put_Annotation(pElement);
PutExtendAttribsValue(m_pAnnoFeatureBuffer, OdDbEntityPtr(pEnt)->xData());
CComVariant OID;
hr = m_pAnnoFeatureCursor->InsertFeature(m_pAnnoFeatureBuffer, &OID);
if (FAILED(hr))
{
CString sInfoText;
sInfoText = "Annotation¶ÔÏóдÈëµ½PGDBʧ°Ü." + CatchErrorInfo();
WriteLog(sInfoText);
m_lUnReadEntityNum++;
}
}
}
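// Note: the alignment mapping used above follows the OdDb -> esri correspondence
// kTextLeft->esriTHALeft, kTextCenter->esriTHACenter, kTextRight->esriTHARight,
// kTextFit->esriTHAFull, and kTextBase/kTextBottom/kTextVertMid/kTextTop to the
// matching esriTVA* values; any unmatched mode keeps the left/baseline defaults.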
/********************************************************************
Brief      : Attach extended attribute (xdata) values to a feature buffer
Parameters :
Return     :
Change log :
*********************************************************************/
BOOL XDWGReader::PutExtendAttribsValue(IFeatureBuffer*& pFtBuf, OdResBuf* xIter)
{
if (m_IsJoinXDataAttrs == FALSE || m_Regapps.GetCount() <= 0 || xIter == NULL)
{
return FALSE;
}
CMapStringToPtr mapExtraRes; //Holds each registered application and its xdata values (app name -> CStringList*)
//Registered Application Name
CString sAppName;
CString sExtendValue;
//CStringList lstExtendValues; //All xdata values, comma-separated
OdResBuf* xIterLoop = xIter;
for (; xIterLoop != 0; xIterLoop = xIterLoop->next())
{
int code = xIterLoop->restype();
switch (OdDxfCode::_getType(code))
{
case OdDxfCode::Name:
case OdDxfCode::String:
sExtendValue.Format("%s", xIterLoop->getString().c_str());
break;
case OdDxfCode::Bool:
sExtendValue.Format("%d", xIterLoop->getBool());
break;
case OdDxfCode::Integer8:
sExtendValue.Format("%d", xIterLoop->getInt8());
break;
case OdDxfCode::Integer16:
sExtendValue.Format("%d", xIterLoop->getInt16());
break;
case OdDxfCode::Integer32:
sExtendValue.Format("%d", xIterLoop->getInt32());
break;
case OdDxfCode::Double:
sExtendValue.Format("%f", xIterLoop->getDouble());
break;
case OdDxfCode::Angle:
sExtendValue.Format("%f", xIterLoop->getDouble());
break;
case OdDxfCode::Point:
{
OdGePoint3d p = xIterLoop->getPoint3d();
sExtendValue.Format("%f,%f,%f", p.x, p.y, p.z);
}
break;
case OdDxfCode::BinaryChunk:
sExtendValue = "<Binary Data>";
break;
case OdDxfCode::Handle:
case OdDxfCode::LayerName:
sExtendValue.Format("%s", xIterLoop->getString().c_str());
break;
case OdDxfCode::ObjectId:
case OdDxfCode::SoftPointerId:
case OdDxfCode::HardPointerId:
case OdDxfCode::SoftOwnershipId:
case OdDxfCode::HardOwnershipId:
{
OdDbHandle h = xIterLoop->getHandle();
sExtendValue.Format("%s", h.ascii());
}
break;
case OdDxfCode::Unknown:
default:
sExtendValue = "Unknown";
break;
}
//Registered Application Name
if (code == OdResBuf::kDxfRegAppName)
{
sAppName = sExtendValue;
//Create the string list for this application
CStringList* pLstExtra = new CStringList();
mapExtraRes.SetAt(sAppName, pLstExtra);
}
else if (code == OdResBuf::kDxfXdAsciiString || code == OdResBuf::kDxfXdReal)
{
void* rValue;
if (mapExtraRes.Lookup(sAppName, rValue))
{
CStringList* pLstExtra = (CStringList*) rValue;
//Append to the list kept for this app name
pLstExtra->AddTail(sExtendValue);
}
}
}
//Get the fields
IFieldsPtr pFields;
pFtBuf->get_Fields(&pFields);
POSITION mapPos = mapExtraRes.GetStartPosition();
while (mapPos)
{
CString sAppName;
void* rValue;
mapExtraRes.GetNextAssoc(mapPos, sAppName, rValue);
CStringList* pList = (CStringList*) rValue;
long lIdx = 0;
pFields->FindField(CComBSTR(sAppName), &lIdx);
if (lIdx != -1)
{
CString sAllValues = "";
//Build the comma-separated xdata string
POSITION pos = pList->GetHeadPosition();
if (pos != NULL)
{
sAllValues = pList->GetNext(pos);
while (pos != NULL)
{
sAllValues = sAllValues + "," + pList->GetNext(pos) ;
}
pFtBuf->put_Value(lIdx, CComVariant(sAllValues));
}
}
pList->RemoveAll();
delete pList;
}
mapExtraRes.RemoveAll();
return TRUE;
}
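// Note: a sketch of the xdata resbuf chain this parser expects (illustrative
// values only). Each registered application opens with kDxfRegAppName, and the
// values that follow are collected into that application's list:
//   (1001, "GIS_APP")   -> kDxfRegAppName, starts a new CStringList
//   (1000, "PIPE-001")  -> kDxfXdAsciiString, appended to GIS_APP's list
//   (1040, 3.75)        -> kDxfXdReal, appended to GIS_APP's list
// The lists are then joined with commas and written into the feature field
// whose name matches the application name, when such a field exists.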
//////////////////////////////////////////////////////////////////////////
//Brief      : Read a single CAD entity and route it to the matching feature class
//Parameters :
//Return     :
//Change log :
//////////////////////////////////////////////////////////////////////////
void XDWGReader::ReadEntity(OdDbObjectId id)
{
OdDbEntityPtr pEnt = id.safeOpenObject();
OdDbLayerTableRecordPtr pLayerTableRecord = pEnt->layerId().safeOpenObject();
CString sInfoText;
if ((pLayerTableRecord->isOff() || pLayerTableRecord->isLocked() || pLayerTableRecord->isFrozen()) && (m_IsReadInvisible == FALSE))
{
//Avoid duplicate log entries
CString sUnReadLayer = pEnt->layer().c_str();
POSITION pos = m_UnReadLayers.Find(sUnReadLayer);
if (pos == NULL)
{
m_UnReadLayers.AddTail(sUnReadLayer);
sInfoText.Format("<%s>²ãÒªËز»¿ÉÊÓ²»´¦Àí!", sUnReadLayer);
WriteLog(sInfoText);
}
m_lUnReadEntityNum++;
}
else
{
OdDbHandle hTmp;
char szEntityHandle[50] = {0};
hTmp = pEnt->getDbHandle();
hTmp.getIntoAsciiBuffer(szEntityHandle);
//Record the current handle value
m_sEntityHandle = szEntityHandle;
//Clear all feature buffers
CleanAllFeatureBuffers();
OdSmartPtr<OdDbEntity_Dumper> pEntDumper = pEnt;
IGeometryPtr pShape;
HRESULT hr;
CComVariant OID;
pEntDumper->m_DwgReader = this;
// Get the geometry
pShape = pEntDumper->dump(pEnt);
if (pShape == NULL)
{
m_lUnReadEntityNum++;
return ;
}
//Project to the target spatial reference
hr = pShape->Project(m_pSpRef);
// Text entities
CString sEntType = OdDbEntityPtr(pEnt)->isA()->name();
if ((strcmp(sEntType, "AcDbMText") == 0) || (strcmp(sEntType, "AcDbText") == 0) || (strcmp(sEntType, "AcDbShape") == 0))
{
if (m_IsCreateAnnotation)
{
//Insert as an annotation feature
InsertAnnoFeature(pEnt);
}
else
{
hr = m_pTextFeatureBuffer->putref_Shape(pShape);
if (SUCCEEDED(hr))
{
AddBaseAttributes(pEnt, "Annotation", m_pTextFeatureBuffer);
//Code mapping check
if (CompareCodes(m_pTextFeatureBuffer))
{
PutExtendAttribsValue(m_pTextFeatureBuffer, pEnt->xData());
hr = m_pTextFeatureCursor->InsertFeature(m_pTextFeatureBuffer, &OID);
if (FAILED(hr))
{
sInfoText = "Text¶ÔÏóдÈëµ½PGDBʧ°Ü¡£" + CatchErrorInfo();
WriteLog(sInfoText);
m_lUnReadEntityNum++;
}
}
}
else
{
sInfoText = "Text¶ÔÏó×ø±ê²»ÕýÈ·¡£" + CatchErrorInfo();
WriteLog(sInfoText);
m_lUnReadEntityNum++;
}
}
}
else
{
esriGeometryType shapeType;
pShape->get_GeometryType(&shapeType);
if (shapeType == esriGeometryPoint) //Point
{
hr = m_pPointFeatureBuffer->putref_Shape(pShape);
if (SUCCEEDED(hr))
{
AddBaseAttributes(pEnt, "Point", m_pPointFeatureBuffer);
//Code mapping check
if (CompareCodes(m_pPointFeatureBuffer))
{
PutExtendAttribsValue(m_pPointFeatureBuffer, pEnt->xData());
hr = m_pPointFeatureCursor->InsertFeature(m_pPointFeatureBuffer, &OID);
if (FAILED(hr))
{
sInfoText = "Point¶ÔÏóдÈëµ½PGDBʧ°Ü." + CatchErrorInfo();
WriteLog(sInfoText);
m_lUnReadEntityNum++;
}
}
}
else
{
sInfoText = "Point¶ÔÏó×ø±ê²»ÕýÈ·." + CatchErrorInfo();
WriteLog(sInfoText);
m_lUnReadEntityNum++;
}
if (strcmp(pEnt->isA()->name(), "AcDbBlockReference") == 0)
m_lBlockNum++;
}
else if (shapeType == esriGeometryPolyline) //Polyline
{
hr = m_pLineFeatureBuffer->putref_Shape(pShape);
if (SUCCEEDED(hr))
{
AddBaseAttributes(pEnt, "Line", m_pLineFeatureBuffer);
CString sDwgLayer;
sDwgLayer.Format("%s", pEnt->layer().c_str());
if (CompareCodes(m_pLineFeatureBuffer))
{
PutExtendAttribsValue(m_pLineFeatureBuffer, pEnt->xData());
hr = m_pLineFeatureCursor->InsertFeature(m_pLineFeatureBuffer, &OID);
if (FAILED(hr))
{
IFieldsPtr pFlds;
m_pLineFeatureBuffer->get_Fields(&pFlds);
long numFields;
pFlds->get_FieldCount(&numFields);
for (int t = 0; t < numFields; t++)
{
CComVariant tVal;
IFieldPtr pFld;
pFlds->get_Field(t, &pFld);
CComBSTR bsName;
pFld->get_Name(&bsName);
m_pLineFeatureBuffer->get_Value(t, &tVal);
}
sInfoText = "Line¶ÔÏóдÈëµ½PGDBʧ°Ü." + CatchErrorInfo();
WriteLog(sInfoText);
m_lUnReadEntityNum++;
}
}
}
else
{
sInfoText = "Line¶ÔÏó×ø±ê²»ÕýÈ·." + CatchErrorInfo();
WriteLog(sInfoText);
m_lUnReadEntityNum++;
}
// If the polyline is closed, optionally build a polygon as well
VARIANT_BOOL isclosed;
IPolylinePtr pPolyline(CLSID_Polyline);
pPolyline = pShape;
pPolyline->get_IsClosed(&isclosed);
if (isclosed && m_IsLine2Polygon)
{
IPolygonPtr pPolygon(CLSID_Polygon);
((ISegmentCollectionPtr) pPolygon)->AddSegmentCollection((ISegmentCollectionPtr) pPolyline);
IAreaPtr pArea = (IAreaPtr)pPolygon;
double dArea = 0.0;
pArea->get_Area(&dArea);
if (dArea < 0.0)
{
pPolygon->ReverseOrientation();
}
hr = m_pPolygonFeatureBuffer->putref_Shape((IGeometryPtr)pPolygon);
if (SUCCEEDED(hr))
{
AddBaseAttributes(pEnt, "Polygon", m_pPolygonFeatureBuffer);
//Code mapping check
if (CompareCodes(m_pPolygonFeatureBuffer))
{
//Attach xdata attributes
PutExtendAttribsValue(m_pPolygonFeatureBuffer, pEnt->xData());
hr = m_pPolygonFeatureCursor->InsertFeature(m_pPolygonFeatureBuffer, &OID);
if (FAILED(hr))
{
sInfoText = "Polygon¶ÔÏóдÈëµ½PGDBʧ°Ü." + CatchErrorInfo();
WriteLog(sInfoText);
}
}
}
else
{
sInfoText = "Polyline¶ÔÏó×ø±ê²»ÕýÈ·." + CatchErrorInfo();
WriteLog(sInfoText);
}
}
}
else if (shapeType == esriGeometryPolygon) //Polygon / hatch
{
if(m_IsReadPolygon)
{
IPolygonPtr pPolygon(CLSID_Polygon);
pPolygon = pShape;
IAreaPtr pArea = (IAreaPtr)pPolygon;
double dArea = 0.0;
pArea->get_Area(&dArea);
if (dArea < 0.0)
{
pPolygon->ReverseOrientation();
}
hr = m_pPolygonFeatureBuffer->putref_Shape((IGeometryPtr)pPolygon);
if (SUCCEEDED(hr))
{
AddBaseAttributes(pEnt, "Polygon", m_pPolygonFeatureBuffer);
PutExtendAttribsValue(m_pPolygonFeatureBuffer, pEnt->xData());
hr = m_pPolygonFeatureCursor->InsertFeature(m_pPolygonFeatureBuffer, &OID);
if (FAILED(hr))
{
sInfoText = "Polygon¶ÔÏóдÈëµ½PGDBʧ°Ü." + CatchErrorInfo();
WriteLog(sInfoText);
m_lUnReadEntityNum++;
}
}
else
{
sInfoText = "Polygon¶ÔÏó×ø±ê²»ÕýÈ·." + CatchErrorInfo();
WriteLog(sInfoText);
m_lUnReadEntityNum++;
}
}
}
else
{
sInfoText.Format("%sͼ²ãÖÐHandleֵΪ:%s µÄÒªËØÎÞ·¨´¦Àí.", pEnt->layer().c_str(), szEntityHandle);
WriteLog(sInfoText);
//Count one more unrecognized entity
m_lUnReadEntityNum++;
}
}
//Read xdata into the extended attribute table
ReadExtendAttribs(pEnt->xData(), szEntityHandle);
}
}
//Read the CAD file (model space)
void XDWGReader::ReadBlock(OdDbDatabase* pDb)
{
// Open ModelSpace
OdDbBlockTableRecordPtr pBlock = pDb->getModelSpaceId().safeOpenObject();
// Initialize counters
m_lBlockNum = 0;
m_bn = -1;
m_lEntityNum = 0;
//Number of entities that could not be read
m_lUnReadEntityNum = 0;
m_vID = 0;
if (m_StepNum < 0)
m_StepNum = 5000;
// Get an entity iterator
OdDbObjectIteratorPtr pEntIter = pBlock->newIterator();
for (; !pEntIter->done(); pEntIter->step())
{
m_lEntityNum++;
}
//Set the progress bar range
if (m_pProgressBar)
{
m_pProgressBar->SetRange(0, m_lEntityNum);
m_pProgressBar->SetPos(0);
}
pEntIter.release();
// For each entity in the block
pEntIter = pBlock->newIterator();
int iReadCount = 0;
for (; !pEntIter->done(); pEntIter->step())
{
try
{
ReadEntity(pEntIter->objectId());
}
catch (...)
{
char szEntityHandle[50] = {0};
pEntIter->objectId().getHandle().getIntoAsciiBuffer(szEntityHandle);
CString sErr;
sErr.Format("Exception while reading entity with handle %s.", szEntityHandle);
WriteLog(sErr);
}
//Step the progress bar
if (m_pProgressBar)
{
m_pProgressBar->StepIt();
}
if (++iReadCount % m_StepNum == 0)
{
if (m_pPointFeatureCursor)
m_pPointFeatureCursor->Flush();
if (m_pTextFeatureCursor)
m_pTextFeatureCursor->Flush();
if (m_pLineFeatureCursor)
m_pLineFeatureCursor->Flush();
if (m_pAnnoFeatureCursor)
m_pAnnoFeatureCursor->Flush();
if (m_pPolygonFeatureCursor)
m_pPolygonFeatureCursor->Flush();
if (m_pExtentTableRowCursor)
m_pExtentTableRowCursor->Flush();
}
}
if (m_pPointFeatureCursor)
m_pPointFeatureCursor->Flush();
if (m_pTextFeatureCursor)
m_pTextFeatureCursor->Flush();
if (m_pLineFeatureCursor)
m_pLineFeatureCursor->Flush();
if (m_pAnnoFeatureCursor)
m_pAnnoFeatureCursor->Flush();
if (m_pPolygonFeatureCursor)
m_pPolygonFeatureCursor->Flush();
if (m_pExtentTableRowCursor)
m_pExtentTableRowCursor->Flush();
pEntIter.release();
CString sResult;
sResult.Format("Total features processed: %d", m_lEntityNum - m_lUnReadEntityNum);
WriteLog(sResult);
}
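// Note: a minimal, hypothetical driver sketch (names assumed, not part of this
// file) showing how ReadBlock is typically reached once a drawing is loaded:
//   OdDbDatabasePtr pDb = svcs.readFile("input.dwg");  // Teigha host services
//   reader.ReadBlock(pDb);                             // walks model space
// Cursors are flushed every m_StepNum entities to bound memory use, and once
// more at the end so the final partial batch is committed.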
// ArcGIS-related helpers
HRESULT XDWGReader::AddBaseAttributes(OdDbEntity* pEnt, LPCTSTR strEnType, IFeatureBuffer*& pFeatureBuffer)
{
long lindex;
int ival ;
CString strval;
IFieldsPtr ipFields;
char buff[20];
OdDbHandle hTmp;
hTmp = pEnt->getDbHandle();
hTmp.getIntoAsciiBuffer(buff);
if (pFeatureBuffer == NULL)
return S_FALSE;
pFeatureBuffer->get_Fields(&ipFields);
//Esri entity type
CComBSTR bsStr;
CComVariant vtVal;
bsStr = g_szEntityType;
ipFields->FindField(bsStr, &lindex);
vtVal = strEnType;
pFeatureBuffer->put_Value(lindex, vtVal);
//DWG geometry type
bsStr = "DwgGeometry";
ipFields->FindField(bsStr, &lindex);
vtVal = pEnt->isA()->name();
pFeatureBuffer->put_Value(lindex, vtVal);
// DWG entity handle
bsStr = "Handle";
ipFields->FindField(bsStr, &lindex);
vtVal = buff;
pFeatureBuffer->put_Value(lindex, vtVal);
// DWG drawing name (the dwg file name), to keep handles unique
bsStr = "BaseName";
ipFields->FindField(bsStr, &lindex);
vtVal = m_strDwgName;
pFeatureBuffer->put_Value(lindex, vtVal);
// DWG layer name
bsStr = "Layer";
ipFields->FindField(bsStr, &lindex);
strval.Format("%s", pEnt->layer().c_str());
strval.MakeUpper();
vtVal = strval;
pFeatureBuffer->put_Value(lindex, vtVal);
// TRACE("Put Layer(AddBaseAttributes): "+ strval+" \r\n");
// DWG symbol color; falls back to the layer color when the entity color index is out of range
bsStr = "Color";
ipFields->FindField(bsStr, &lindex);
if (pEnt->colorIndex() > 255 || pEnt->colorIndex() < 1)
{
OdDbLayerTableRecordPtr pLayer = pEnt->layerId().safeOpenObject();
ival = pLayer->colorIndex();
}
else
ival = pEnt->colorIndex();
vtVal = ival;
pFeatureBuffer->put_Value(lindex, vtVal);
// Linetype: record the line type name
bsStr = "Linetype";
ipFields->FindField(bsStr, &lindex);
strval.Format("%s", pEnt->linetype().c_str());
strval.MakeUpper();
vtVal = strval;
pFeatureBuffer->put_Value(lindex, vtVal);
//Entity visibility (optional), DXF convention: 0 = visible, 1 = invisible
// kInvisible 1 kVisible 0 (the stored Visible field is inverted: 1 = visible)
bsStr = "Visible";
ipFields->FindField(bsStr, &lindex);
if (pEnt->visibility() == 1)
{
ival = 0;
}
else
{
ival = 1;
}
vtVal = ival;
pFeatureBuffer->put_Value(lindex, vtVal);
//Extended attribute FeatureUID
//bsStr = "FEATURE_UID";
//ipFields->FindField(bsStr, &lindex);
//if (lindex != -1)
//{
// CString sFeatureUID = ReadFeatureUID(pEnt->xData());
// vtVal = sFeatureUID;
// pFeatureBuffer->put_Value(lindex, vtVal);
//}
vtVal.Clear();
bsStr.Empty();
return 0;
}
void XDWGReader::AddAttributes(LPCTSTR csFieldName, LPCTSTR csFieldValue, IFeatureBuffer*& pFeatureBuffer)
{
try
{
long lindex;
IFieldsPtr ipFields;
CString strval;
if (pFeatureBuffer == NULL)
return;
pFeatureBuffer->get_Fields(&ipFields);
CComBSTR bsStr = csFieldName;
ipFields->FindField(bsStr, &lindex);
if (lindex != -1)
{
CComVariant vtVal;
//Convert radians to degrees
if (m_bConvertAngle && (strcmp("Angle", csFieldName) == 0))
{
double dRadian = atof(csFieldValue);
double dAngle = dRadian * g_dAngleParam;
vtVal = dAngle;
}
else
{
vtVal = csFieldValue;
}
HRESULT hr = pFeatureBuffer->put_Value(lindex, vtVal);
vtVal.Clear();
}
bsStr.Empty();
}
catch (...)
{
CString sError;
sError.Format("Field %s: error writing value %s.", csFieldName, csFieldValue);
WriteLog(sError);
}
}
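// Note: with the radian-to-degree conversion enabled, g_dAngleParam is assumed
// here to be 180.0 / pi (not confirmed in this file), so a DWG rotation of
// pi/2 (1.570796...) would be stored in the "Angle" field as 90.0:
//   AddAttributes("Angle", "1.570796", m_pPointFeatureBuffer);  // -> 90.0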
void XDWGReader::CleanAllFeatureBuffers()
{
if (m_pAnnoFeatureBuffer)
CleanFeatureBuffer(m_pAnnoFeatureBuffer);
if (m_pTextFeatureBuffer)
CleanFeatureBuffer(m_pTextFeatureBuffer);
if (m_pLineFeatureBuffer)
CleanFeatureBuffer(m_pLineFeatureBuffer);
if (m_pPointFeatureBuffer)
CleanFeatureBuffer(m_pPointFeatureBuffer);
if (m_pPolygonFeatureBuffer)
CleanFeatureBuffer(m_pPolygonFeatureBuffer);
}
//void XDWGReader::BlockIniAttributes()
//{
// if (m_pTextFeatureBuffer)
// IniBlockAttributes(m_pTextFeatureBuffer);
// if (m_pLineFeatureBuffer)
// IniBlockAttributes(m_pLineFeatureBuffer);
// if (m_pPointFeatureBuffer)
// IniBlockAttributes(m_pPointFeatureBuffer);
// if (m_pPolygonFeatureBuffer)
// IniBlockAttributes(m_pPolygonFeatureBuffer);
//}
//////////////////////////////////////////////////////////////////////////
//Find and fix the memory-leak issue (by zl)
void XDWGReader::CleanFeatureBuffer(IFeatureBuffer* pFeatureBuffer)
{
if (pFeatureBuffer == NULL)
return;
//Release geometry memory
IGeometryPtr pShape;
HRESULT hr = pFeatureBuffer->get_Shape(&pShape);
if (SUCCEEDED(hr))
{
if (pShape != NULL)
{
pShape->SetEmpty();
}
}
IFieldsPtr ipFields;
long iFieldCount;
VARIANT_BOOL isEditable;
esriFieldType fieldType;
VARIANT emptyVal;
::VariantInit(&emptyVal);
CComVariant emptyStr = "";
pFeatureBuffer->get_Fields(&ipFields);
ipFields->get_FieldCount(&iFieldCount);
for (int i = 0; i < iFieldCount; i++)
{
IFieldPtr pFld;
ipFields->get_Field(i, &pFld);
pFld->get_Editable(&isEditable);
pFld->get_Type(&fieldType);
if (isEditable == VARIANT_TRUE && fieldType != esriFieldTypeGeometry)
{
if (fieldType == esriFieldTypeString)
{
pFeatureBuffer->put_Value(i, emptyStr);
}
else
{
pFeatureBuffer->put_Value(i, emptyVal);
}
}
}
}
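// Note: only editable, non-geometry fields are reset above; string fields get
// an empty string while other types get VT_EMPTY, so a reused buffer cannot
// carry the previous entity's attribute values into the next insert.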
//void XDWGReader::IniBlockAttributes(IFeatureBuffer* pFeatureBuffer)
//{
// long lindex;
// // double dbval;
// CString strval;
// IFieldsPtr ipFields;
// if (pFeatureBuffer == NULL)
// return;
//
// //Release memory
// IGeometry* pShape;
// HRESULT hr = pFeatureBuffer->get_Shape(&pShape);
// if (SUCCEEDED(hr))
// {
// if (pShape != NULL)
// {
// pShape->SetEmpty();
// }
// }
//
// // Clear out values, otherwise the previous feature's attributes would carry over
// pFeatureBuffer->get_Fields(&ipFields);
// CComBSTR bsStr;
// CComVariant vtVal;
// bsStr = "Thickness";
// ipFields->FindField(bsStr, &lindex);
// if (lindex != -1)
// {
// vtVal = 0;
// pFeatureBuffer->put_Value(lindex, vtVal);
// }
//
// bsStr = "Scale";
// ipFields->FindField(bsStr, &lindex);
// if (lindex != -1)
// {
// vtVal = 0;
// pFeatureBuffer->put_Value(lindex, vtVal);
// }
//
// bsStr = "Angle";
// ipFields->FindField(bsStr, &lindex);
// if (lindex != -1)
// {
// vtVal = 0;
// pFeatureBuffer->put_Value(lindex, vtVal);
// }
//
// bsStr = "Elevation";
// ipFields->FindField(bsStr, &lindex);
// if (lindex != -1)
// {
// vtVal = 0;
// pFeatureBuffer->put_Value(lindex, vtVal);
// }
//
// bsStr = "Width";
// ipFields->FindField(bsStr, &lindex);
// if (lindex != -1)
// {
// vtVal = 0;
// pFeatureBuffer->put_Value(lindex, vtVal);
// }
//
// bsStr.Empty();
//
// //IniExtraAttributes(pFeatureBuffer, ipFields);
//
// return;
//}
//void XDWGReader::OpenLogFile()
//{
// //if (m_pLogRec != NULL)
// //{
// // WinExec("Notepad.exe " + m_sLogFilePath, SW_SHOW);
// //}
//
// //if (m_LogList.GetCount() > 0)
// //{
// // COleDateTime dtCur = COleDateTime::GetCurrentTime();
// // CString sName = dtCur.Format("%y%m%d_%H%M%S");
// // CString sLogFileName;
// // sLogFileName.Format("%sDwgConversionLog_%s.log", GetLogPath(), sName);
//
// // CStdioFile f3(sLogFileName, CFile::modeCreate | CFile::modeWrite | CFile::typeText);
// // for (POSITION pos = m_LogList.GetHeadPosition(); pos != NULL;)
// // {
// // f3.WriteString(m_LogList.GetNext(pos) + "\n");
// // }
// // f3.Close();
// // WinExec("Notepad.exe " + sLogFileName, SW_SHOW);
// // m_LogList.RemoveAll();
// //}
//}
CString XDWGReader::CatchErrorInfo()
{
IErrorInfoPtr ipError;
CComBSTR bsStr;
CString sError;
::GetErrorInfo(0, &ipError);
if (ipError)
{
ipError->GetDescription(&bsStr);
sError = bsStr;
}
CString sRetErr;
sRetErr.Format("Error reading object with handle %s. Cause: %s", m_sEntityHandle, sError);
return sRetErr;
}
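// Note: GetErrorInfo(0, ...) retrieves (and clears) the calling thread's last
// COM error object, so CatchErrorInfo() must run right after the failing call.
// Typical use in this file:
//   hr = pCursor->InsertFeature(pBuffer, &OID);
//   if (FAILED(hr)) WriteLog("insert failed. " + CatchErrorInfo());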
HRESULT XDWGReader::CreateDwgPointFields(ISpatialReference* ipSRef, IFields** ppfields)
{
IFieldsPtr ipFields;
ipFields.CreateInstance(CLSID_Fields);
IFieldsEditPtr ipFieldsEdit(ipFields);
IFieldPtr ipField;
ipField.CreateInstance(CLSID_Field);
IFieldEditPtr ipFieldEdit(ipField);
// create the geometry field
IGeometryDefPtr ipGeomDef(CLSID_GeometryDef);
IGeometryDefEditPtr ipGeomDefEdit(ipGeomDef);
// assign the geometry definiton properties.
ipGeomDefEdit->put_GeometryType(esriGeometryPoint);
ipGeomDefEdit->put_GridCount(1);
//double dGridSize = 1000;
//VARIANT_BOOL bhasXY;
//ipSRef->HasXYPrecision(&bhasXY);
//if (bhasXY)
//{
// double xmin, ymin, xmax, ymax, dArea;
// ipSRef->GetDomain(&xmin, &xmax, &ymin, &ymax);
// dArea = (xmax - xmin) * (ymax - ymin);
// dGridSize = sqrt(dArea / 100);
//}
//if (dGridSize <= 0)
// dGridSize = 1000;
ipGeomDefEdit->put_GridSize(0, DEFAULT_GIS_GRID_SIZE);
ipGeomDefEdit->put_AvgNumPoints(2);
ipGeomDefEdit->put_HasM(VARIANT_FALSE);
ipGeomDefEdit->put_HasZ(VARIANT_FALSE);
ipGeomDefEdit->putref_SpatialReference(ipSRef);
ipFieldEdit->put_Name(CComBSTR(L"SHAPE"));
ipFieldEdit->put_AliasName(CComBSTR(L"SHAPE"));
ipFieldEdit->put_Type(esriFieldTypeGeometry);
ipFieldEdit->putref_GeometryDef(ipGeomDef);
ipFieldsEdit->AddField(ipField);
// create the object id field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"OBJECTID"));
ipFieldEdit->put_AliasName(CComBSTR(L"OBJECT ID"));
ipFieldEdit->put_Type(esriFieldTypeOID);
ipFieldsEdit->AddField(ipField);
// Create Entity field: records the esri entity type
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(g_szEntityType));
ipFieldEdit->put_AliasName(CComBSTR(g_szEntityType));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldsEdit->AddField(ipField);
// Create DwgGeometry field: records the DWG entity type
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"DwgGeometry"));
ipFieldEdit->put_AliasName(CComBSTR(L"DwgGeometry"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldsEdit->AddField(ipField);
// Create Handle field: records the DWG entity handle
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Handle"));
ipFieldEdit->put_AliasName(CComBSTR(L"Handle"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create BaseName field: records the DWG drawing (file) name so handles stay unique
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"BaseName"));
ipFieldEdit->put_AliasName(CComBSTR(L"BaseName"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Layer field: records the DWG layer name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Layer"));
ipFieldEdit->put_AliasName(CComBSTR(L"Layer"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Color field: records the DWG symbol color
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Color"));
ipFieldEdit->put_AliasName(CComBSTR(L"Color"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create Linetype field: records the line type
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Linetype"));
ipFieldEdit->put_AliasName(CComBSTR(L"Linetype"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Thickness field: records the DWG entity thickness
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Thickness"));
ipFieldEdit->put_AliasName(CComBSTR(L"Thickness"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Scale field: records the DWG symbol scale
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Scale"));
ipFieldEdit->put_AliasName(CComBSTR(L"Scale"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create Elevation field: records the DWG entity elevation
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Elevation"));
ipFieldEdit->put_AliasName(CComBSTR(L"Elevation"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create Blockname field: records the block name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Blockname"));
ipFieldEdit->put_AliasName(CComBSTR(L"Blockname"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Blocknumber field: records each block's number
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Blocknumber"));
ipFieldEdit->put_AliasName(CComBSTR(L"Blocknumber"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create Angle field: records the DWG entity rotation angle
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Angle"));
ipFieldEdit->put_AliasName(CComBSTR(L"Angle"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create Visible field: records DWG entity visibility, 0 = invisible, 1 = visible
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Visible"));
ipFieldEdit->put_AliasName(CComBSTR(L"Visible"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
*ppfields = ipFields.Detach();
return 0;
}
HRESULT XDWGReader::CreateDwgLineFields(ISpatialReference* ipSRef, IFields** ppfields)
{
IFieldsPtr ipFields;
ipFields.CreateInstance(CLSID_Fields);
IFieldsEditPtr ipFieldsEdit(ipFields);
IFieldPtr ipField;
ipField.CreateInstance(CLSID_Field);
IFieldEditPtr ipFieldEdit(ipField);
// create the geometry field
IGeometryDefPtr ipGeomDef(CLSID_GeometryDef);
IGeometryDefEditPtr ipGeomDefEdit(ipGeomDef);
// assign the geometry definiton properties.
ipGeomDefEdit->put_GeometryType(esriGeometryPolyline);
ipGeomDefEdit->put_GridCount(1);
//double dGridSize = 1000;
//VARIANT_BOOL bhasXY;
//ipSRef->HasXYPrecision(&bhasXY);
//if (bhasXY)
//{
// double xmin, ymin, xmax, ymax, dArea;
// ipSRef->GetDomain(&xmin, &xmax, &ymin, &ymax);
// dArea = (xmax - xmin) * (ymax - ymin);
// dGridSize = sqrt(dArea / 100);
//}
//if (dGridSize <= 0)
// dGridSize = 1000;
ipGeomDefEdit->put_GridSize(0, DEFAULT_GIS_GRID_SIZE);
ipGeomDefEdit->put_AvgNumPoints(2);
ipGeomDefEdit->put_HasM(VARIANT_FALSE);
ipGeomDefEdit->put_HasZ(VARIANT_FALSE);
ipGeomDefEdit->putref_SpatialReference(ipSRef);
ipFieldEdit->put_Name(CComBSTR(L"SHAPE"));
ipFieldEdit->put_AliasName(CComBSTR(L"SHAPE"));
ipFieldEdit->put_Type(esriFieldTypeGeometry);
ipFieldEdit->putref_GeometryDef(ipGeomDef);
ipFieldsEdit->AddField(ipField);
// create the object id field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"OBJECTID"));
ipFieldEdit->put_AliasName(CComBSTR(L"OBJECT ID"));
ipFieldEdit->put_Type(esriFieldTypeOID);
ipFieldsEdit->AddField(ipField);
// Create Entity field: records the esri entity type
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(g_szEntityType));
ipFieldEdit->put_AliasName(CComBSTR(g_szEntityType));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldsEdit->AddField(ipField);
// Create DwgGeometry field: records the DWG entity type
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"DwgGeometry"));
ipFieldEdit->put_AliasName(CComBSTR(L"DwgGeometry"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldsEdit->AddField(ipField);
// Create Handle field: records the DWG entity handle
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Handle"));
ipFieldEdit->put_AliasName(CComBSTR(L"Handle"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create BaseName field: records the DWG drawing (file) name so handles stay unique
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"BaseName"));
ipFieldEdit->put_AliasName(CComBSTR(L"BaseName"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Layer field: records the DWG layer name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Layer"));
ipFieldEdit->put_AliasName(CComBSTR(L"Layer"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Color field: records the DWG line entity color
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Color"));
ipFieldEdit->put_AliasName(CComBSTR(L"Color"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create Linetype field: records the DWG line entity linetype name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Linetype"));
ipFieldEdit->put_AliasName(CComBSTR(L"Linetype"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Elevation field: records the DWG entity elevation
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Elevation"));
ipFieldEdit->put_AliasName(CComBSTR(L"Elevation"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create Thickness field: records the DWG entity thickness
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Thickness"));
ipFieldEdit->put_AliasName(CComBSTR(L"Thickness"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Width field: records the DWG entity line width
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Width"));
ipFieldEdit->put_AliasName(CComBSTR(L"Width"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create Blockname field: records the block name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Blockname"));
ipFieldEdit->put_AliasName(CComBSTR(L"Blockname"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Blocknumber field: records each block's number
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Blocknumber"));
ipFieldEdit->put_AliasName(CComBSTR(L"Blocknumber"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create Visible field: records DWG entity visibility, 0 = invisible, 1 = visible
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Visible"));
ipFieldEdit->put_AliasName(CComBSTR(L"Visible"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
*ppfields = ipFields.Detach();
return 0;
}
HRESULT XDWGReader::CreateDwgPolygonFields(ISpatialReference* ipSRef, IFields** ppfields)
{
IFieldsPtr ipFields;
ipFields.CreateInstance(CLSID_Fields);
IFieldsEditPtr ipFieldsEdit(ipFields);
IFieldPtr ipField;
ipField.CreateInstance(CLSID_Field);
IFieldEditPtr ipFieldEdit(ipField);
ipFieldEdit->put_Name(CComBSTR(L"SHAPE"));
ipFieldEdit->put_Type(esriFieldTypeGeometry);
// create the geometry field
IGeometryDefPtr ipGeomDef(CLSID_GeometryDef);
IGeometryDefEditPtr ipGeomDefEdit;
ipGeomDefEdit = ipGeomDef;
// assign the geometry definiton properties.
ipGeomDefEdit->put_GeometryType(esriGeometryPolygon);
ipGeomDefEdit->put_GridCount(1);
ipGeomDefEdit->put_AvgNumPoints(2);
ipGeomDefEdit->put_HasM(VARIANT_FALSE);
ipGeomDefEdit->put_HasZ(VARIANT_FALSE);
//double dGridSize = 1000;
//VARIANT_BOOL bhasXY;
//ipSRef->HasXYPrecision(&bhasXY);
//if (bhasXY)
//{
// double xmin, ymin, xmax, ymax, dArea;
// ipSRef->GetDomain(&xmin, &xmax, &ymin, &ymax);
// dArea = (xmax - xmin) * (ymax - ymin);
// dGridSize = sqrt(dArea / 100);
//}
//if (dGridSize <= 0)
// dGridSize = 1000;
ipGeomDefEdit->put_GridSize(0, DEFAULT_GIS_GRID_SIZE);
ipGeomDefEdit->putref_SpatialReference(ipSRef);
ipFieldEdit->putref_GeometryDef(ipGeomDef);
ipFieldsEdit->AddField(ipField);
// create the object id field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"OBJECTID"));
ipFieldEdit->put_AliasName(CComBSTR(L"OBJECT ID"));
ipFieldEdit->put_Type(esriFieldTypeOID);
ipFieldsEdit->AddField(ipField);
// Create Entity field: records the esri entity type
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(g_szEntityType));
ipFieldEdit->put_AliasName(CComBSTR(g_szEntityType));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldsEdit->AddField(ipField);
// Create DwgGeometry field: records the DWG entity type
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"DwgGeometry"));
ipFieldEdit->put_AliasName(CComBSTR(L"DwgGeometry"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldsEdit->AddField(ipField);
// Create Handle field: records the DWG entity handle
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Handle"));
ipFieldEdit->put_AliasName(CComBSTR(L"Handle"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create BaseName field: records the DWG drawing (file) name so handles stay unique
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"BaseName"));
ipFieldEdit->put_AliasName(CComBSTR(L"BaseName"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Layer field: records the DWG layer name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Layer"));
ipFieldEdit->put_AliasName(CComBSTR(L"Layer"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Color field: records the DWG line entity color
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Color"));
ipFieldEdit->put_AliasName(CComBSTR(L"Color"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create Linetype field: records the DWG line entity linetype name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Linetype"));
ipFieldEdit->put_AliasName(CComBSTR(L"Linetype"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Elevation field: records the DWG entity elevation
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Elevation"));
ipFieldEdit->put_AliasName(CComBSTR(L"Elevation"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create Thickness field: records the DWG entity thickness
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Thickness"));
ipFieldEdit->put_AliasName(CComBSTR(L"Thickness"));
//ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Width field: records the DWG entity line width
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Width"));
ipFieldEdit->put_AliasName(CComBSTR(L"Width"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create Blockname field: records the block name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Blockname"));
ipFieldEdit->put_AliasName(CComBSTR(L"Blockname"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Blocknumber field: records each block's number
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Blocknumber"));
ipFieldEdit->put_AliasName(CComBSTR(L"Blocknumber"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create Visible field: records DWG entity visibility, 0 = invisible, 1 = visible
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Visible"));
ipFieldEdit->put_AliasName(CComBSTR(L"Visible"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
*ppfields = ipFields.Detach();
return 0;
}
HRESULT XDWGReader::CreateDwgTextPointFields(ISpatialReference* ipSRef, IFields** ppfields)
{
IFieldsPtr ipFields;
ipFields.CreateInstance(CLSID_Fields);
IFieldsEditPtr ipFieldsEdit(ipFields);
IFieldPtr ipField;
ipField.CreateInstance(CLSID_Field);
IFieldEditPtr ipFieldEdit(ipField);
// create the geometry field
IGeometryDefPtr ipGeomDef(CLSID_GeometryDef);
IGeometryDefEditPtr ipGeomDefEdit(ipGeomDef);
// assign the geometry definiton properties.
ipGeomDefEdit->put_GeometryType(esriGeometryPoint);
ipGeomDefEdit->put_GridCount(1);
//double dGridSize = 1000;
//VARIANT_BOOL bhasXY;
//ipSRef->HasXYPrecision(&bhasXY);
//if (bhasXY)
//{
// double xmin, ymin, xmax, ymax, dArea;
// ipSRef->GetDomain(&xmin, &xmax, &ymin, &ymax);
// dArea = (xmax - xmin) * (ymax - ymin);
// dGridSize = sqrt(dArea / 100);
//}
//if (dGridSize <= 0)
// dGridSize = 1000;
ipGeomDefEdit->put_GridSize(0, DEFAULT_GIS_GRID_SIZE);
ipGeomDefEdit->put_AvgNumPoints(2);
ipGeomDefEdit->put_HasM(VARIANT_FALSE);
ipGeomDefEdit->put_HasZ(VARIANT_FALSE);
ipGeomDefEdit->putref_SpatialReference(ipSRef);
ipFieldEdit->put_Name(CComBSTR(L"SHAPE"));
ipFieldEdit->put_AliasName(CComBSTR(L"SHAPE"));
ipFieldEdit->put_Type(esriFieldTypeGeometry);
ipFieldEdit->putref_GeometryDef(ipGeomDef);
ipFieldsEdit->AddField(ipField);
// create the object id field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"OBJECTID"));
ipFieldEdit->put_AliasName(CComBSTR(L"OBJECT ID"));
ipFieldEdit->put_Type(esriFieldTypeOID);
ipFieldsEdit->AddField(ipField);
// Create Entity field: records the esri entity type
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(g_szEntityType));
ipFieldEdit->put_AliasName(CComBSTR(g_szEntityType));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldsEdit->AddField(ipField);
// Create DwgGeometry field: records the DWG entity type
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"DwgGeometry"));
ipFieldEdit->put_AliasName(CComBSTR(L"DwgGeometry"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldsEdit->AddField(ipField);
// Create Handle field: records the DWG entity handle
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Handle"));
ipFieldEdit->put_AliasName(CComBSTR(L"Handle"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create BaseName field: records the DWG drawing (file) name so handles stay unique
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"BaseName"));
ipFieldEdit->put_AliasName(CComBSTR(L"BaseName"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Layer field: records the DWG layer name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Layer"));
ipFieldEdit->put_AliasName(CComBSTR(L"Layer"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Color field: records the DWG line entity color
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Color"));
ipFieldEdit->put_AliasName(CComBSTR(L"Color"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create Linetype field: records the DWG line entity linetype name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Linetype"));
ipFieldEdit->put_AliasName(CComBSTR(L"Linetype"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Thickness field: records the DWG entity thickness
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Thickness"));
ipFieldEdit->put_AliasName(CComBSTR(L"Thickness"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Blockname field: records the block name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Blockname"));
ipFieldEdit->put_AliasName(CComBSTR(L"Blockname"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Blocknumber field: records each block's number
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Blocknumber"));
ipFieldEdit->put_AliasName(CComBSTR(L"Blocknumber"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create Angle field: records the DWG text rotation angle
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Angle"));
ipFieldEdit->put_AliasName(CComBSTR(L"Angle"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create TextString field: records the DWG text content
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"TextString"));
ipFieldEdit->put_AliasName(CComBSTR(L"TextString"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(255);
ipFieldsEdit->AddField(ipField);
// Create Height field: records the DWG text height
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Height"));
ipFieldEdit->put_AliasName(CComBSTR(L"Height"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create WidthFactor field:
// is an additional scaling applied in the x direction which makes the text either fatter or thinner.
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"WidthFactor"));
ipFieldEdit->put_AliasName(CComBSTR(L"WidthFactor"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create Oblique field: obliquing angle
// is an obliquing angle to be applied to the text, which causes it to "lean" either to the right or left.
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Oblique"));
ipFieldEdit->put_AliasName(CComBSTR(L"Oblique"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create VerticalMode (VtMode) field: records the DWG text vertical alignment
// kTextBase 0 kTextBottom 1 kTextVertMid 2 kTextTop 3
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"VtMode"));
ipFieldEdit->put_AliasName(CComBSTR(L"VtMode"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create HorizontalMode (HzMode) field: records the DWG text horizontal alignment
//kTextLeft 0 kTextCenter 1 kTextRight 2 kTextAlign 3
// kTextMid 4 kTextFit 5
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"HzMode"));
ipFieldEdit->put_AliasName(CComBSTR(L"HzMode"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create AlignmentPointX field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"AlignPtX"));
ipFieldEdit->put_AliasName(CComBSTR(L"AlignPtX"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create AlignmentPointY field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"AlignPtY"));
ipFieldEdit->put_AliasName(CComBSTR(L"AlignPtY"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create BoundingPointMinX field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"PtMinX"));
ipFieldEdit->put_AliasName(CComBSTR(L"PtMinX"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create BoundingPointMinY field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"PtMinY"));
ipFieldEdit->put_AliasName(CComBSTR(L"PtMinY"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create BoundingPointMaxX field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"PtMaxX"));
ipFieldEdit->put_AliasName(CComBSTR(L"PtMaxX"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create BoundingPointMaxY field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"PtMaxY"));
ipFieldEdit->put_AliasName(CComBSTR(L"PtMaxY"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create BigFontname field: records the DWG text big-font name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"BigFontname"));
ipFieldEdit->put_AliasName(CComBSTR(L"BigFontname"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create ShapeFilename field: records the DWG text font file
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"ShapeFilename"));
ipFieldEdit->put_AliasName(CComBSTR(L"ShapeFilename"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create ShapeName field: records the DWG text shape name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"ShapeName"));
ipFieldEdit->put_AliasName(CComBSTR(L"ShapeName"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Visible field: records DWG entity visibility, 0 = invisible, 1 = visible
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Visible"));
ipFieldEdit->put_AliasName(CComBSTR(L"Visible"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
*ppfields = ipFields.Detach();
return 0;
}
//////////////////////////////////////////////////////////////////////////
//Brief      : Create the fields for the annotation feature class
//Parameters :
//Return     :
//Change log :
//////////////////////////////////////////////////////////////////////////
HRESULT XDWGReader::CreateDwgAnnotationFields(ISpatialReference* ipSRef, IFields** ppfields)
{
HRESULT hr;
IObjectClassDescriptionPtr pOCDesc(CLSID_AnnotationFeatureClassDescription);
IFieldsPtr pReqFields;
pOCDesc->get_RequiredFields(&pReqFields);
//ÉèÖÿռä²Î¿¼
if (ipSRef != NULL)
{
long numFields;
pReqFields->get_FieldCount(&numFields);
for (int i = 0; i < numFields; i++)
{
IFieldPtr pField;
pReqFields->get_Field(i, &pField);
esriFieldType fldType;
pField->get_Type(&fldType);
if (fldType == esriFieldTypeGeometry)
{
IFieldEditPtr pEdtField = pField;
IGeometryDefPtr pGeoDef;
hr = pEdtField->get_GeometryDef(&pGeoDef);
IGeometryDefEditPtr pEdtGeoDef = pGeoDef;
hr = pEdtGeoDef->putref_SpatialReference(ipSRef);
hr = pEdtField->putref_GeometryDef(pGeoDef);
break;
}
}
}
IFieldsEditPtr ipFieldsEdit = pReqFields;
//Create the CAD annotation layer fields
IFieldEditPtr ipFieldEdit;
IFieldPtr ipField;
// Create Entity field: records the esri entity type
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR("Entity_Type"));
ipFieldEdit->put_AliasName(CComBSTR("Entity_Type"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldsEdit->AddField(ipField);
// Create Handle field: records the DWG entity handle
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Handle"));
ipFieldEdit->put_AliasName(CComBSTR(L"Handle"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create BaseName field: records the DWG drawing (file) name so handles stay unique
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"BaseName"));
ipFieldEdit->put_AliasName(CComBSTR(L"BaseName"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Layer field: records the DWG layer name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Layer"));
ipFieldEdit->put_AliasName(CComBSTR(L"Layer"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Color field: records the DWG symbol color
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Color"));
ipFieldEdit->put_AliasName(CComBSTR(L"Color"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create Thickness field: records the DWG entity thickness
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Thickness"));
ipFieldEdit->put_AliasName(CComBSTR(L"Thickness"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Elevation field: records the DWG entity elevation
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Elevation"));
ipFieldEdit->put_AliasName(CComBSTR(L"Elevation"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create Height field: records the text height
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Height"));
ipFieldEdit->put_AliasName(CComBSTR(L"Height"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create TextStyle field: records the text style
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"TextStyle"));
ipFieldEdit->put_AliasName(CComBSTR(L"TextStyle"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Oblique field: records the oblique angle
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Oblique"));
ipFieldEdit->put_AliasName(CComBSTR(L"Oblique"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create AlignmentPointX field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"AlignPtX"));
ipFieldEdit->put_AliasName(CComBSTR(L"AlignPtX"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create AlignmentPointY field
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"AlignPtY"));
ipFieldEdit->put_AliasName(CComBSTR(L"AlignPtY"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
*ppfields = ipFieldsEdit.Detach();
return 0;
}
/************************************************************************
Brief      : Create the extended attribute (xdata) table
Parameters :
Return     :
Change log :
************************************************************************/
HRESULT XDWGReader::CreateExtendTable(IFeatureWorkspace* pFeatWorkspace, BSTR bstrName, ITable** pTable)
{
HRESULT hr;
if (pFeatWorkspace == NULL)
return E_FAIL;
// Creates only: BaseName -- drawing name; Handle -- feature ID; XDataName -- xdata name; XDataNum -- xdata number; XDataValue -- xdata value
hr = pFeatWorkspace->OpenTable(bstrName, pTable);
// If the table cannot be opened, assume it does not exist and create it
if (*pTable == NULL)
{
IFieldsPtr ipFields;
ipFields.CreateInstance(CLSID_Fields);
IFieldsEditPtr ipIndexFields;
ipIndexFields.CreateInstance(CLSID_Fields);
IFieldsEditPtr ipFieldsEdit = ipFields;
if (ipFieldsEdit == NULL)
return E_FAIL;
// Add a field for the user name
IFieldEditPtr ipField;
hr = ipField.CreateInstance(CLSID_Field);
if (FAILED(hr))
return hr;
hr = ipField->put_Name(CComBSTR(L"Handle"));
if (FAILED(hr))
return hr;
hr = ipField->put_Type(esriFieldTypeString);
if (FAILED(hr))
return hr;
hr = ipField->put_Length(150);
if (FAILED(hr))
return hr;
hr = ipField->put_Required(VARIANT_TRUE);
if (FAILED(hr))
return hr;
hr = ipFieldsEdit->AddField(ipField);
if (FAILED(hr))
return hr;
//Add index field 1
hr = ipIndexFields->AddField(ipField);
if (FAILED(hr))
return hr;
hr = ipField.CreateInstance(CLSID_Field);
if (FAILED(hr))
return hr;
hr = ipField->put_Name(CComBSTR(L"BaseName"));
if (FAILED(hr))
return hr;
hr = ipField->put_Type(esriFieldTypeString);
if (FAILED(hr))
return hr;
hr = ipField->put_Length(250);
if (FAILED(hr))
return hr;
hr = ipField->put_Required(VARIANT_TRUE);
if (FAILED(hr))
return hr;
hr = ipFieldsEdit->AddField(ipField);
if (FAILED(hr))
return hr;
//Add index field 2
hr = ipIndexFields->AddField(ipField);
if (FAILED(hr))
return hr;
hr = ipField.CreateInstance(CLSID_Field);
if (FAILED(hr))
return hr;
hr = ipField->put_Name(CComBSTR(L"XDataName"));
if (FAILED(hr))
return hr;
hr = ipField->put_Type(esriFieldTypeString);
if (FAILED(hr))
return hr;
hr = ipField->put_Length(250);
if (FAILED(hr))
return hr;
hr = ipFieldsEdit->AddField(ipField);
if (FAILED(hr))
return hr;
// 2050 is the documented maximum capacity (the length below is set larger)
hr = ipField.CreateInstance(CLSID_Field);
if (FAILED(hr))
return hr;
hr = ipField->put_Name(CComBSTR(L"XDataValue"));
if (FAILED(hr))
return hr;
hr = ipField->put_Type(esriFieldTypeString);
if (FAILED(hr))
return hr;
hr = ipField->put_Length(65535);
if (FAILED(hr))
return hr;
hr = ipFieldsEdit->AddField(ipField);
if (FAILED(hr))
return hr;
// Try to Create the table
hr = pFeatWorkspace->CreateTable(bstrName, ipFields, NULL, NULL, NULL, pTable);
if (FAILED(hr))
return hr;
IIndexEditPtr ipIndexEdit;
ipIndexEdit.CreateInstance(CLSID_Index);
ipIndexEdit->putref_Fields(ipIndexFields);
hr = (*pTable)->AddIndex(ipIndexEdit);
if (FAILED(hr))
return hr;
}
return S_OK;
}
HRESULT XDWGReader::CreateDatasetFeatureClass(IFeatureWorkspace* pFWorkspace, IFeatureDataset* pFDS, IFields* pFields, BSTR bstrName, esriFeatureType featType, IFeatureClass*& ppFeatureClass)
{
if (!pFDS && !pFWorkspace)
return S_FALSE;
BSTR bstrConfigWord = L"";
IFieldPtr ipField;
CComBSTR bstrShapeFld;
esriFieldType fieldType;
long lNumFields;
pFields->get_FieldCount(&lNumFields);
for (int i = 0; i < lNumFields; i++)
{
pFields->get_Field(i, &ipField);
ipField->get_Type(&fieldType);
if (esriFieldTypeGeometry == fieldType)
{
ipField->get_Name(&bstrShapeFld);
break;
}
}
HRESULT hr;
if (pFDS)
{
hr = pFDS->CreateFeatureClass(bstrName, pFields, 0, 0, featType, bstrShapeFld, 0, &ppFeatureClass);
}
else
{
// Try opening the FeatureClass directly; create it if that fails
hr = pFWorkspace->OpenFeatureClass(bstrName, &ppFeatureClass);
if (ppFeatureClass == NULL)
hr = pFWorkspace->CreateFeatureClass(bstrName, pFields, 0, 0, featType, bstrShapeFld, 0, &ppFeatureClass);
}
return hr;
}
void XDWGReader::GetGeometryDef(IFeatureClass* pClass, IGeometryDef** pDef)
{
try
{
BSTR shapeName;
pClass->get_ShapeFieldName(&shapeName);
IFieldsPtr pFields;
pClass->get_Fields(&pFields);
long lGeomIndex;
pFields->FindField(shapeName, &lGeomIndex);
IFieldPtr pField;
pFields->get_Field(lGeomIndex, &pField);
pField->get_GeometryDef(pDef);
}
catch (...)
{
}
}
BOOL XDWGReader::IsResetDomain(IFeatureWorkspace* pFWorkspace, CString szFCName)
{
IWorkspace2Ptr iws2(pFWorkspace);
VARIANT_BOOL isexist = FALSE;
if (iws2)
{
iws2->get_NameExists(esriDTFeatureClass, CComBSTR(szFCName), &isexist);
}
return isexist;
}
void XDWGReader::ResetDomain(IFeatureWorkspace* pFWorkspace, CString szFCName, ISpatialReference* ipSRef)
{
IGeometryDefPtr ipGeomDef;
ISpatialReferencePtr ipOldSRef;
double mOldMinX, mOldMinY, mOldMaxY, mOldMaxX;
double mMinX, mMinY, mMaxY, mMaxX;
double mNewMinX, mNewMinY, mNewMaxY, mNewMaxX, dFX, dFY, mNewXYScale ;
HRESULT hr;
pFWorkspace->OpenFeatureClass(CComBSTR(szFCName), &m_pFeatClassPolygon);
GetGeometryDef(m_pFeatClassPolygon, &ipGeomDef);
pFWorkspace->OpenFeatureClass(CComBSTR(szFCName), &m_pFeatClassPoint);
GetGeometryDef(m_pFeatClassPoint, &ipGeomDef);
pFWorkspace->OpenFeatureClass(CComBSTR(szFCName), &m_pFeatClassLine);
GetGeometryDef(m_pFeatClassLine, &ipGeomDef);
//pFWorkspace->OpenFeatureClass(CComBSTR(szFCName), &m_pFeatClassText);
//GetGeometryDef(m_pFeatClassText, &ipGeomDef);
ipGeomDef->get_SpatialReference(&ipOldSRef);
ipOldSRef->GetDomain(&mOldMinX, &mOldMaxX, &mOldMinY, &mOldMaxY);
ipSRef->GetDomain(&mMinX, &mMaxX, &mMinY, &mMaxY);
if (mMinX < mOldMinX)
mNewMinX = mMinX;
else
mNewMinX = mOldMinX;
if (mMinY < mOldMinY)
mNewMinY = mMinY;
else
mNewMinY = mOldMinY;
if (mMaxX > mOldMaxX)
mNewMaxX = mMaxX;
else
mNewMaxX = mOldMaxX;
if (mMaxY > mOldMaxY)
mNewMaxY = mMaxY;
else
mNewMaxY = mOldMaxY;
ipOldSRef->SetDomain(mNewMinX, mNewMaxX, mNewMinY, mNewMaxY);
ipOldSRef->GetFalseOriginAndUnits(&dFX, &dFY, &mNewXYScale);
ipOldSRef->GetDomain(&mNewMinX, &mNewMaxX, &mNewMinY, &mNewMaxY);
IGeometryDefEditPtr ipGeomDefEdit(ipGeomDef);
hr = ipGeomDefEdit->putref_SpatialReference(ipOldSRef);
if (FAILED(hr))
{
WriteLog(CatchErrorInfo());
}
}
// B-spline algorithm
/*********************************************************************
Parameters:
n          - number of control points - 1
t          - polynomial degree + 1
control    - array of control point coordinates
output     - array receiving the fitted output points
num_output - number of output points
Preconditions:
n + 2 > t (otherwise no curve is produced)
the control array size matches the declared point count
the output array is allocated with num_output points
**********************************************************************/
void XDWGReader::Bspline(int n, int t, DwgPoint* control, DwgPoint* output, int num_output)
{
int* u;
double increment, interval;
DwgPoint calcxyz;
int output_index;
u = new int[n + t + 1];
ComputeIntervals(u, n, t);
increment = (double) (n - t + 2) / (num_output - 1); // how much parameter goes up each time
interval = 0;
for (output_index = 0; output_index < num_output - 1; output_index++)
{
ComputePoint(u, n, t, interval, control, &calcxyz);
output[output_index].x = calcxyz.x;
output[output_index].y = calcxyz.y;
output[output_index].z = calcxyz.z;
interval = interval + increment; // increment our parameter
}
output[num_output - 1].x = control[n].x; // put in the last DwgPoint
output[num_output - 1].y = control[n].y;
output[num_output - 1].z = control[n].z;
delete[] u; // array form: u was allocated with new int[]
}
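// Note: a small, illustrative call (values assumed): four control points with
// t = 3 give a quadratic B-spline sampled at 20 points.
//   DwgPoint ctrl[4] = { {0,0,0}, {1,2,0}, {3,2,0}, {4,0,0} };
//   DwgPoint out[20];
//   Bspline(3, 3, ctrl, out, 20);   // n = 4 - 1
// The caller owns both arrays; Bspline writes exactly num_output points and
// pins the last output point to the last control point.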
double XDWGReader::Blend(int k, int t, int* u, double v) // calculate the blending value
{
double value;
if (t == 1) // base case for the recursion
{
if ((u[k] <= v) && (v < u[k + 1]))
value = 1;
else
value = 0;
}
else
{
if ((u[k + t - 1] == u[k]) && (u[k + t] == u[k + 1])) // check for divide by zero
{
value = 0;
}
else if (u[k + t - 1] == u[k]) // if a term's denominator is zero,use just the other
{
value = (u[k + t] - v) / (u[k + t] - u[k + 1]) * Blend(k + 1, t - 1, u, v);
}
else if (u[k + t] == u[k + 1])
{
value = (v - u[k]) / (u[k + t - 1] - u[k]) * Blend(k, t - 1, u, v);
}
else
{
value = (v - u[k]) / (u[k + t - 1] - u[k]) * Blend(k, t - 1, u, v) + (u[k + t] - v) / (u[k + t] - u[k + 1]) * Blend(k + 1, t - 1, u, v);
}
}
return value;
}
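// Note: Blend() is the standard Cox-de Boor recursion,
//   B(k,1)(v) = 1 if u[k] <= v < u[k+1], else 0
//   B(k,t)(v) = (v - u[k]) / (u[k+t-1] - u[k]) * B(k,t-1)(v)
//             + (u[k+t] - v) / (u[k+t] - u[k+1]) * B(k+1,t-1)(v)
// with the zero-denominator cases handled explicitly above.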
void XDWGReader::ComputeIntervals(int* u, int n, int t) // figure out the knots
{
int j;
for (j = 0; j <= n + t; j++)
{
if (j < t)
u[j] = 0;
else if ((t <= j) && (j <= n))
u[j] = j - t + 1;
else if (j > n)
u[j] = n - t + 2; // if n-t=-2 then we're screwed, everything goes to 0
}
}
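// Note: for example, n = 3 and t = 3 produce the open-uniform knot vector
//   u = [0, 0, 0, 1, 2, 2, 2]
// (t leading zeros, a rising middle run of j - t + 1, and n - t + 2 repeated
// at the end), which clamps the curve to the first and last control points.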
void XDWGReader::ComputePoint(int* u, int n, int t, double v, DwgPoint* control, DwgPoint* output)
{
int k;
double temp;
// initialize the variables that will hold our outputted DwgPoint
output->x = 0;
output->y = 0;
output->z = 0;
for (k = 0; k <= n; k++)
{
temp = Blend(k, t, u, v); // same blend is used for each dimension coordinate
output->x = output->x + (control[k]).x * temp;
output->y = output->y + (control[k]).y * temp;
output->z = output->z + (control[k]).z * temp;
}
}
/************************************************************************
Brief description : Add extended attribute fields
Input params : pRegapps: list of registered application names whose XData to join
Return value :
Change log :
************************************************************************/
void XDWGReader::AddExtraFields(CStringList* pRegapps)
{
if (pRegapps == NULL) return;
if (m_IsJoinXDataAttrs == FALSE || pRegapps->GetCount() <= 0)
{
return;
}
m_Regapps.AddTail(pRegapps);
}
/************************************************************************
Brief description : Initialize the code mapping table
Input params :
Return value :
Change log :
************************************************************************/
//void XDWGReader::InitCompareCodes(ITable* pCompareTable)
//{
//if (pCompareTable==NULL) return;
// CleanCompareCodes();
// //IFeatureWorkspacePtr ipFeatureWorkspace = API_GetSysWorkspace();
// //if (ipFeatureWorkspace == NULL)
// //{
// // AfxMessageBox("´ò¿ªÏµÍ³±í³ö´í£¡", MB_ICONERROR);
// // return;
// //}
// //ITablePtr pCompareTable;
// //ipFeatureWorkspace->OpenTable(CComBSTR("CAD2GDB"), &pCompareTable);
// //if (pCompareTable == NULL)
// //{
// // AfxMessageBox("±àÂë¶ÔÕÕ±í²»´æÔÚ£¬ÎÞ·¨½øÐбàÂë¶ÔÕÕ¡£", MB_ICONERROR);
// // return;
// //}
// CComBSTR bsStr;
// IEsriCursorPtr ipCursor;
// pCompareTable->Search(NULL, VARIANT_FALSE, &ipCursor);
// if (ipCursor != NULL)
// {
// long lFieldIndex = -1;
// IEsriRowPtr ipRow;
// IFieldsPtr pFields = NULL;
// ipCursor->NextRow(&ipRow);
// while (ipRow != NULL)
// {
// CComVariant vt;
// XDwg2GdbRecord* pTbRow = new XDwg2GdbRecord();
// lFieldIndex = -1;
// ipRow->get_Fields(&pFields);
// bsStr = "DWG_LAYER";
// pFields->FindField(bsStr, &lFieldIndex);
// if (lFieldIndex != -1)
// {
// ipRow->get_Value(lFieldIndex, &vt);
// if (vt.vt != VT_EMPTY && vt.vt != VT_NULL)
// {
// pTbRow->DWG_LAYER = (CString) vt.bstrVal;
// }
// }
// bsStr = "DWG_BLOCKNAME";
// pFields->FindField(bsStr, &lFieldIndex);
// if (lFieldIndex != -1)
// {
// ipRow->get_Value(lFieldIndex, &vt);
// if (vt.vt != VT_EMPTY && vt.vt != VT_NULL)
// {
// pTbRow->DWG_BLOCKNAME = (CString) vt.bstrVal;
// }
// }
// bsStr = "GDB_LAYER";
// pFields->FindField(bsStr, &lFieldIndex);
// if (lFieldIndex != -1)
// {
// ipRow->get_Value(lFieldIndex, &vt);
// if (vt.vt != VT_EMPTY && vt.vt != VT_NULL)
// {
// pTbRow->GDB_LAYER = (CString) vt.bstrVal;
// }
// }
// bsStr = "YSDM";
// pFields->FindField(bsStr, &lFieldIndex);
// if (lFieldIndex != -1)
// {
// ipRow->get_Value(lFieldIndex, &vt);
// if (vt.vt != VT_EMPTY && vt.vt != VT_NULL)
// {
// pTbRow->YSDM = (CString) vt.bstrVal;
// }
// }
// bsStr = "YSMC";
// pFields->FindField(bsStr, &lFieldIndex);
// if (lFieldIndex != -1)
// {
// ipRow->get_Value(lFieldIndex, &vt);
// if (vt.vt != VT_EMPTY && vt.vt != VT_NULL)
// {
// pTbRow->YSMC = (CString) vt.bstrVal;
// }
// }
// ipCursor->NextRow(&ipRow);
// //add the mapping record
// m_aryCodes.Add(pTbRow);
// }
// }
// bsStr.Empty();
//}
/************************************************************************
Brief description : Get the value of the given field from a FeatureBuffer
Input params : pFeatureBuffer: source feature buffer; sFieldName: name of the field to read
Return value : the field's value in the FeatureBuffer, formatted as a string
Change log :
************************************************************************/
CString XDWGReader::GetFeatureBufferFieldValue(IFeatureBuffer*& pFeatureBuffer, CString sFieldName)
{
CComVariant vtFieldValue;
CString sFieldValue;
long lIndex;
IFieldsPtr pFields;
pFeatureBuffer->get_Fields(&pFields);
CComBSTR bsStr = sFieldName;
pFields->FindField(bsStr, &lIndex);
bsStr.Empty();
if (lIndex == -1)
{
sFieldValue = "";
}
else
{
pFeatureBuffer->get_Value(lIndex, &vtFieldValue);
switch (vtFieldValue.vt)
{
case VT_EMPTY:
case VT_NULL:
sFieldValue = "";
break;
case VT_BOOL:
sFieldValue = vtFieldValue.boolVal != VARIANT_FALSE ? "1" : "0"; // VARIANT_TRUE is -1, so compare against VARIANT_FALSE
break;
case VT_UI1:
sFieldValue.Format("%d", vtFieldValue.bVal);
break;
case VT_I2:
sFieldValue.Format("%d", vtFieldValue.iVal);
break;
case VT_I4:
sFieldValue.Format("%d", vtFieldValue.lVal);
break;
case VT_R4:
{
long lVal = (long) vtFieldValue.fltVal; // truncates the fractional part
sFieldValue.Format("%d", lVal);
}
break;
case VT_R8:
{
long lVal = (long) vtFieldValue.dblVal; // truncates the fractional part
sFieldValue.Format("%d", lVal);
}
break;
case VT_BSTR:
sFieldValue = vtFieldValue.bstrVal;
break;
default:
sFieldValue = "";
break;
}
}
return sFieldValue;
}
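// Usage sketch (assumes pBuffer is a populated IFeatureBuffer*):
//   CString sLayer = GetFeatureBufferFieldValue(pBuffer, "Layer"); // "" when the field is absent or NULL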
/************************************************************************
Brief description : Write mapped attribute values into the feature buffer (currently disabled)
Input params :
Return value :
Change log :
************************************************************************/
//void XDWGReader::PutExtraAttributes(IFeatureBuffer*& pFeatureBuffer, XDwg2GdbRecord* pCode)
//{
// HRESULT hr;
// LONG lFieldIndex;
//
// IFieldsPtr ipFields;
// pFeatureBuffer->get_Fields(&ipFields);
//
//
// CComBSTR bsStr;
// CComVariant vtVal;
//
// bsStr = "GDB_LAYER";
// ipFields->FindField(bsStr, &lFieldIndex);
// if (lFieldIndex != -1)
// {
// vtVal = pCode->GDB_LAYER;
// hr = pFeatureBuffer->put_Value(lFieldIndex, vtVal);
// }
//
// bsStr = "YSDM";
// ipFields->FindField(bsStr, &lFieldIndex);
// if (lFieldIndex != -1)
// {
// vtVal = pCode->YSDM;
// hr = pFeatureBuffer->put_Value(lFieldIndex, vtVal);
// }
//
// bsStr = "YSMC";
// ipFields->FindField(bsStr, &lFieldIndex);
// if (lFieldIndex != -1)
// {
// vtVal = pCode->YSMC;
// hr = pFeatureBuffer->put_Value(lFieldIndex, vtVal);
// }
//
// //bsStr = "SymbolCode";
// //ipFields->FindField(bsStr, &lFieldIndex);
// //if (lFieldIndex != -1)
// //{
// // vtVal = pCode->SymbolCode;
// // hr = pFeatureBuffer->put_Value(lFieldIndex, vtVal);
// //}
//
// bsStr.Empty();
//}
// Load-only mode is supported by feature classes in ArcSDE and by feature
// classes and tables in file geodatabases; it speeds up bulk data loading.
HRESULT XDWGReader::BeginLoadOnlyMode(IFeatureClass*& pTargetClass)
{
//if (pTargetClass == NULL)
//{
// return S_FALSE;
//}
//IFeatureClassLoadPtr pClassLoad(pTargetClass);
//if (pClassLoad)
//{
// ISchemaLockPtr pSchemaLock(pTargetClass);
// if (pSchemaLock)
// {
// if (SUCCEEDED(pSchemaLock->ChangeSchemaLock(esriExclusiveSchemaLock)))
// {
// VARIANT_BOOL bLoadOnly;
// pClassLoad->get_LoadOnlyMode(&bLoadOnly);
// if (!bLoadOnly)
// return pClassLoad->put_LoadOnlyMode(VARIANT_TRUE);
// else
// return S_OK;
// }
// }
//}
//return S_FALSE;
return S_OK;
}
HRESULT XDWGReader::EndLoadOnlyMode(IFeatureClass*& pTargetClass)
{
//if (pTargetClass == NULL)
//{
// return S_FALSE;
//}
//IFeatureClassLoadPtr pClassLoad(pTargetClass);
//if (pClassLoad)
//{
// ISchemaLockPtr pSchemaLock(pTargetClass);
// if (pSchemaLock)
// {
// if (SUCCEEDED(pSchemaLock->ChangeSchemaLock(esriSharedSchemaLock)))
// {
// VARIANT_BOOL bLoadOnly;
// pClassLoad->get_LoadOnlyMode(&bLoadOnly);
// if (bLoadOnly)
// return pClassLoad->put_LoadOnlyMode(VARIANT_FALSE);
// else
// return S_OK;
// }
// }
//}
//return S_FALSE;
return S_OK;
}
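// Typical pairing (sketch; both bodies above are currently stubbed to return S_OK):
//   BeginLoadOnlyMode(pFeatClass);   // take an exclusive schema lock, enable load-only
//   ... bulk-insert features through an insert cursor ...
//   EndLoadOnlyMode(pFeatClass);     // rebuild indexes, restore the shared lock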
void XDWGReader::ReleaseFeatureBuffer(IFeatureBufferPtr& pFeatureBuffer)
{
if (pFeatureBuffer == NULL)
{
return;
}
// Release the geometry held by the buffer
IGeometryPtr pShape;
HRESULT hr = pFeatureBuffer->get_Shape(&pShape);
if (SUCCEEDED(hr) && pShape != NULL)
{
pShape->SetEmpty();
}
}
/************************************************************************
Brief description : Code mapping (match CAD layers/blocks to GDB codes)
Input params :
Return value :
Change log :
************************************************************************/
BOOL XDWGReader::CompareCodes(IFeatureBuffer*& pFeatureBuffer)
{
return TRUE; // code mapping is currently disabled; the original logic is kept below for reference
//try
//{
// if (pFeatureBuffer == NULL)
// return FALSE;
// int iCompareCodes = m_aryCodes.GetSize();
// if (iCompareCodes <= 0)
// {
// return TRUE;
// }
// //CString sThickness = GetFeatureBufferFieldValue(pFeatureBuffer, "Thickness");
// CString sBlockname = GetFeatureBufferFieldValue(pFeatureBuffer, "Blockname");
// CString sLayer = GetFeatureBufferFieldValue(pFeatureBuffer, "Layer");
// CString sEntityType = GetFeatureBufferFieldValue(pFeatureBuffer, g_szEntityType);
// //points (1): Blockname -> DWG_BLOCKNAME
// //points (2): Layer -> DWG_LAYER
// //lines: Layer -> DWG_LAYER
// XDwg2GdbRecord* pDwg2GdbRecord = NULL;
// IGeometryPtr pGeometry;
// pFeatureBuffer->get_Shape(&pGeometry);
// if (pGeometry == NULL)
// {
// return FALSE;
// }
// esriFeatureType featType;
// IFeaturePtr pFeat;
// pFeat = pFeatureBuffer;
// if (pFeat != NULL)
// {
// pFeat->get_FeatureType(&featType);
// }
// else
// {
// featType = esriFTSimple;
// }
// //annotation layer
// if (featType == esriFTAnnotation)
// {
// if (!sLayer.IsEmpty())
// {
// for (int i = 0; i < iCompareCodes; i++)
// {
// pDwg2GdbRecord = m_aryCodes.GetAt(i);
// if (pDwg2GdbRecord->DWG_LAYER.CompareNoCase(sLayer) == 0)
// {
// PutExtraAttributes(pFeatureBuffer, pDwg2GdbRecord);
// return TRUE;
// }
// }
// }
// return FALSE;
// }
// else if (featType == esriFTSimple) //regular layer
// {
// //HRESULT hr;
// CComVariant OID;
// esriGeometryType shapeType;
// pGeometry->get_GeometryType(&shapeType);
// if (shapeType == esriGeometryPoint)
// {
// //points (1): Blockname -> DWG_BLOCKNAME
// //points (2): Layer -> DWG_LAYER
// if (!sBlockname.IsEmpty())
// {
// for (int i = 0; i < iCompareCodes; i++)
// {
// pDwg2GdbRecord = m_aryCodes.GetAt(i);
// if (pDwg2GdbRecord->DWG_BLOCKNAME.CompareNoCase(sBlockname) == 0)
// {
// PutExtraAttributes(pFeatureBuffer, pDwg2GdbRecord);
// return TRUE;
// }
// }
// }
// else
// {
// if (!sLayer.IsEmpty())
// {
// for (int i = 0; i < iCompareCodes; i++)
// {
// pDwg2GdbRecord = m_aryCodes.GetAt(i);
// if (pDwg2GdbRecord->DWG_LAYER.CompareNoCase(sLayer) == 0)
// {
// PutExtraAttributes(pFeatureBuffer, pDwg2GdbRecord);
// return TRUE;
// }
// }
// }
// }
// return FALSE;
// }
// else //if(shapeType == esriGeometryPolyline)
// {
// //Ïߣ¬Layer->DWG_LAYER
// if (!sLayer.IsEmpty())
// {
// for (int i = 0; i < iCompareCodes; i++)
// {
// pDwg2GdbRecord = m_aryCodes.GetAt(i);
// if (pDwg2GdbRecord->DWG_LAYER.CompareNoCase(sLayer) == 0)
// {
// PutExtraAttributes(pFeatureBuffer, pDwg2GdbRecord);
// return TRUE;
// }
// }
// }
// return FALSE;
// }
// }
//}
//catch (...)
//{
// CString sError;
// sError.Format("Code conversion error.");
// WriteLog(sError);
// return FALSE;
//}
//return FALSE;
}
/************************************************************************
Brief description : Create an annotation feature class
Input params : pWS: target workspace; sAnnoName: name of the new class; pFields: field set for the class
Return value : the newly created annotation feature class
Change log :
************************************************************************/
IFeatureClass* XDWGReader::CreateAnnoFtCls(IWorkspace* pWS, CString sAnnoName, IFields* pFields)
{
HRESULT hr;
IFeatureWorkspaceAnnoPtr PFWSAnno = pWS;
IGraphicsLayerScalePtr pGLS(CLSID_GraphicsLayerScale);
pGLS->put_Units(esriMeters);
pGLS->put_ReferenceScale(m_dAnnoScale);
// Set up the symbol collection
ISymbolCollectionPtr pSymbolColl(CLSID_SymbolCollection);
ITextSymbolPtr myTxtSym(CLSID_TextSymbol);
//Set the font for myTxtSym
IFontDispPtr myFont(CLSID_StdFont);
IFontPtr pFt = myFont;
pFt->put_Name(CComBSTR("Courier New"));
CY cy;
cy.int64 = 9 * 10000; // IFont sizes are CY values scaled by 10,000 (i.e. 9 pt)
pFt->put_Size(cy);
myTxtSym->put_Font(myFont);
// Set the Color for myTxtSym to be Dark Red
IRgbColorPtr myColor(CLSID_RgbColor);
myColor->put_Red(150);
myColor->put_Green(0);
myColor->put_Blue (0);
myTxtSym->put_Color(myColor);
// Set other properties for myTxtSym
myTxtSym->put_Angle(0);
myTxtSym->put_RightToLeft(VARIANT_FALSE);
myTxtSym->put_VerticalAlignment(esriTVABaseline);
myTxtSym->put_HorizontalAlignment(esriTHAFull);
myTxtSym->put_Size(200);
//myTxtSym->put_Case(esriTCNormal);
ISymbolPtr pSymbol = myTxtSym;
pSymbolColl->putref_Symbol(0, pSymbol);
//set up the annotation labeling properties including the expression
IAnnotateLayerPropertiesPtr pAnnoProps(CLSID_LabelEngineLayerProperties);
pAnnoProps->put_FeatureLinked(VARIANT_TRUE);
pAnnoProps->put_AddUnplacedToGraphicsContainer(VARIANT_FALSE);
pAnnoProps->put_CreateUnplacedElements(VARIANT_TRUE);
pAnnoProps->put_DisplayAnnotation(VARIANT_TRUE);
pAnnoProps->put_UseOutput(VARIANT_TRUE);
ILabelEngineLayerPropertiesPtr pLELayerProps = pAnnoProps;
IAnnotationExpressionEnginePtr aAnnoVBScriptEngine(CLSID_AnnotationVBScriptEngine);
pLELayerProps->putref_ExpressionParser(aAnnoVBScriptEngine);
pLELayerProps->put_Expression(CComBSTR("[DESCRIPTION]"));
pLELayerProps->put_IsExpressionSimple(VARIANT_TRUE);
pLELayerProps->put_Offset(0);
pLELayerProps->put_SymbolID(0);
pLELayerProps->putref_Symbol(myTxtSym);
IAnnotateLayerTransformationPropertiesPtr pATP = pAnnoProps;
double dRefScale;
pGLS->get_ReferenceScale(&dRefScale);
pATP->put_ReferenceScale(dRefScale);
pATP->put_Units(esriMeters);
pATP->put_ScaleRatio(1);
IAnnotateLayerPropertiesCollectionPtr pAnnoPropsColl(CLSID_AnnotateLayerPropertiesCollection);
pAnnoPropsColl->Add(pAnnoProps);
// Use the AnnotationFeatureClassDescription coclass to get the list of required fields and the default name of the shape field
IObjectClassDescriptionPtr pOCDesc(CLSID_AnnotationFeatureClassDescription);
IFeatureClassDescriptionPtr pFDesc = pOCDesc;
IUIDPtr pInstCLSID;
IUIDPtr pExtCLSID;
CComBSTR bsShapeFieldName;
pOCDesc->get_InstanceCLSID(&pInstCLSID);
pOCDesc->get_ClassExtensionCLSID(&pExtCLSID);
pFDesc->get_ShapeFieldName(&bsShapeFieldName);
/*IFieldsPtr pReqFields;
pOCDesc->get_RequiredFields(&pReqFields);
//set the spatial reference
if (m_pSpRef != NULL)
{
long numFields;
pReqFields->get_FieldCount(&numFields);
for (int i = 0; i < numFields; i++)
{
IFieldPtr pField;
pReqFields->get_Field(i, &pField);
esriFieldType fldType;
pField->get_Type(&fldType);
if (fldType == esriFieldTypeGeometry)
{
IFieldEditPtr pEdtField = pField;
IGeometryDefPtr pGeoDef;
hr = pEdtField->get_GeometryDef(&pGeoDef);
IGeometryDefEditPtr pEdtGeoDef = pGeoDef;
hr = pEdtGeoDef->putref_SpatialReference(m_pSpRef);
hr = pEdtField->putref_GeometryDef(pGeoDef);
break;
}
}
}
IFieldsEditPtr ipFieldsEdit = pReqFields;
//Create the fields for annotation layers read from the CAD file
IFieldEditPtr ipFieldEdit;
IFieldPtr ipField;
// Create Entity_Type: records the esri entity type
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR("Entity_Type"));
ipFieldEdit->put_AliasName(CComBSTR("Entity_Type"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldsEdit->AddField(ipField);
// Create Handle: records the DWG entity handle
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Handle"));
ipFieldEdit->put_AliasName(CComBSTR(L"Handle"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create BaseName: records the DWG base name, i.e. the DWG file name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"BaseName"));
ipFieldEdit->put_AliasName(CComBSTR(L"BaseName"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Layer: records the DWG entity's layer name
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Layer"));
ipFieldEdit->put_AliasName(CComBSTR(L"Layer"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(250);
ipFieldsEdit->AddField(ipField);
// Create Color: records the DWG entity's symbol color
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Color"));
ipFieldEdit->put_AliasName(CComBSTR(L"Color"));
ipFieldEdit->put_Type(esriFieldTypeInteger);
ipFieldsEdit->AddField(ipField);
// Create Thickness: records the DWG entity's thickness
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Thickness"));
ipFieldEdit->put_AliasName(CComBSTR(L"Thickness"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Elevation: records the DWG entity's elevation
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Elevation"));
ipFieldEdit->put_AliasName(CComBSTR(L"Elevation"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create Height: records the text height
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Height"));
ipFieldEdit->put_AliasName(CComBSTR(L"Height"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);
// Create TextStyle: records the text style
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"TextStyle"));
ipFieldEdit->put_AliasName(CComBSTR(L"TextStyle"));
ipFieldEdit->put_Type(esriFieldTypeString);
ipFieldEdit->put_Length(150);
ipFieldsEdit->AddField(ipField);
// Create Oblique: records the oblique angle
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField;
ipFieldEdit->put_Name(CComBSTR(L"Oblique"));
ipFieldEdit->put_AliasName(CComBSTR(L"Oblique"));
ipFieldEdit->put_Type(esriFieldTypeDouble);
ipFieldsEdit->AddField(ipField);*/
IFeatureClass* pAnnoFtCls;
// Create the new annotation class
hr = PFWSAnno->CreateAnnotationClass(CComBSTR(sAnnoName), pFields, pInstCLSID, pExtCLSID, bsShapeFieldName, CComBSTR(""), NULL, 0, pAnnoPropsColl, pGLS, pSymbolColl, VARIANT_TRUE, &pAnnoFtCls);
return pAnnoFtCls;
}
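// Usage sketch (hypothetical names; pWS must come from a workspace that
// supports annotation classes, e.g. a file geodatabase). Attach() is used so
// the reference returned by the function is not AddRef'd a second time:
//   IFeatureClassPtr pAnno;
//   pAnno.Attach(CreateAnnoFtCls(pWS, "DrawingAnno", pAnnoFields));
//   if (pAnno == NULL) { /* creation failed; check the log */ }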
/************************************************************************
Brief description : Generate an annotation text element
Input params : text, rotation angle, text height, anchor X/Y, reference scale, horizontal/vertical alignment
Return value : the new text element (caller owns the returned reference)
Change log :
************************************************************************/
ITextElement* XDWGReader::MakeTextElementByStyle(CString strText, double dblAngle,
double dblHeight, double dblX,
double dblY, double ReferenceScale, esriTextHorizontalAlignment horizAlign, esriTextVerticalAlignment vertAlign)
{
HRESULT hr;
ITextElementPtr pTextElement;
ISimpleTextSymbolPtr pTextSymbol;
pTextSymbol.CreateInstance(CLSID_TextSymbol);
// Set the text symbol font by getting the IFontDisp interface
pTextSymbol->put_Font(m_pAnnoTextFont);
double mapUnitsInches;
IUnitConverterPtr pUnitConverter(CLSID_UnitConverter);
pUnitConverter->ConvertUnits(dblHeight, esriMeters, esriInches, &mapUnitsInches);
double dSize = (mapUnitsInches * 72.0) / ReferenceScale; // points = inches * 72, reduced by the reference scale
pTextSymbol->put_Size(dSize);
pTextSymbol->put_HorizontalAlignment(horizAlign);
pTextSymbol->put_VerticalAlignment(vertAlign);
pTextElement.CreateInstance(CLSID_TextElement);
hr = pTextElement->put_ScaleText(VARIANT_TRUE);
hr = pTextElement->put_Text(CComBSTR(strText));
hr = pTextElement->put_Symbol(pTextSymbol);
IElementPtr pElement = pTextElement;
IPointPtr pPoint(CLSID_Point);
hr = pPoint->PutCoords(dblX, dblY);
hr = pElement->put_Geometry(pPoint);
if (fabs(dblAngle) > 0)
{
ITransform2DPtr pTransform2D = pTextElement;
pTransform2D->Rotate(pPoint, dblAngle);
}
return pTextElement.Detach();
}
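// Usage sketch (hypothetical values): a 2.5 m label at (500100.0, 3800200.0),
// rotated 45 degrees, against a 1:1000 reference scale. Attach() adopts the
// detached reference returned by the function:
//   ITextElementPtr pElem;
//   pElem.Attach(MakeTextElementByStyle("Parcel 27", 45.0, 2.5,
//       500100.0, 3800200.0, 1000.0, esriTHACenter, esriTVACenter));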
/********************************************************************
Brief description : Release an interface pointer
Input params : pInterface: the interface pointer to release; set to NULL on return
Return value : the remaining reference count reported by Release()
Change log :
*********************************************************************/
int XDWGReader::ReleasePointer(IUnknown*& pInterface)
{
int iRst = 0;
if (pInterface != NULL)
{
try
{
iRst = pInterface->Release();
pInterface = NULL;
}
catch(...)
{
// Ignore faults from pointers whose underlying object is already gone.
}
}
return iRst;
}
// ÊͷŽӿڶÔÏó
void XDWGReader::ReleaseAOs(void)
{
int iRst = 0;
iRst = ReleasePointer((IUnknown*&)m_pPointFeatureCursor);
iRst = ReleasePointer((IUnknown*&)m_pTextFeatureCursor);
iRst = ReleasePointer((IUnknown*&)m_pLineFeatureCursor);
iRst = ReleasePointer((IUnknown*&)m_pAnnoFeatureCursor);
iRst = ReleasePointer((IUnknown*&)m_pPolygonFeatureCursor);
iRst = ReleasePointer((IUnknown*&)m_pExtentTableRowCursor);
iRst = ReleasePointer((IUnknown*&)m_pPointFeatureBuffer);
iRst = ReleasePointer((IUnknown*&)m_pTextFeatureBuffer);
iRst = ReleasePointer((IUnknown*&)m_pLineFeatureBuffer);
iRst = ReleasePointer((IUnknown*&)m_pAnnoFeatureBuffer);
iRst = ReleasePointer((IUnknown*&)m_pPolygonFeatureBuffer);
iRst = ReleasePointer((IUnknown*&)m_pExtentTableRowBuffer);
iRst = ReleasePointer((IUnknown*&)m_pSpRef);
iRst = ReleasePointer((IUnknown*&)m_pFeatClassPoint);
iRst = ReleasePointer((IUnknown*&)m_pFeatClassText);
iRst = ReleasePointer((IUnknown*&)m_pFeatClassLine);
iRst = ReleasePointer((IUnknown*&)m_pFeatClassPolygon);
iRst = ReleasePointer((IUnknown*&)m_pAnnoFtCls);
iRst = ReleasePointer((IUnknown*&)m_pExtendTable);
}
/********************************************************************
Brief description : Initialize object pointers
Input params :
Return value :
Change log :
*********************************************************************/
void XDWGReader::InitAOPointers(void)
{
m_pPointFeatureCursor = NULL;
m_pTextFeatureCursor = NULL;
m_pLineFeatureCursor = NULL;
m_pAnnoFeatureCursor = NULL;
m_pPolygonFeatureCursor = NULL;
m_pExtentTableRowCursor = NULL;
m_pPointFeatureBuffer = NULL;
m_pTextFeatureBuffer = NULL;
m_pLineFeatureBuffer = NULL;
m_pAnnoFeatureBuffer = NULL;
m_pPolygonFeatureBuffer = NULL;
m_pExtentTableRowBuffer = NULL;
m_pFeatClassPoint = NULL;
m_pFeatClassLine = NULL;
m_pFeatClassPolygon = NULL;
m_pAnnoFtCls = NULL;
m_pExtendTable = NULL;
m_pFeatClassText = NULL;
}<|fim▁end|> | // ´´½¨ Blocknumber £¬¼Ç¼ÿ¸öBlock±àºÅ
ipField.CreateInstance(CLSID_Field);
ipFieldEdit = ipField; |
<|file_name|>methods-are-with-self-type.rs<|end_file_name|><|fim▁begin|>// Currently, all generic functions are instantiated in each codegen unit that
// uses them, even those not marked with #[inline], so this test does not make
// much sense at the moment.
// ignore-test
//
// We specify incremental here because we want to test the partitioning for
// incremental compilation
// incremental
// compile-flags:-Zprint-mono-items=lazy
#![allow(dead_code)]
#![feature(start)]
struct SomeType;
struct SomeGenericType<T1, T2>(T1, T2);
mod mod1 {
use super::{SomeType, SomeGenericType};
// Even though the impl is in `mod1`, the methods should end up in the
// parent module, since that is where their self-type is.
impl SomeType {
//~ MONO_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::method[0] @@ methods_are_with_self_type[External]
fn method(&self) {}
//~ MONO_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::associated_fn[0] @@ methods_are_with_self_type[External]
fn associated_fn() {}
}
impl<T1, T2> SomeGenericType<T1, T2> {
pub fn method(&self) {}
pub fn associated_fn(_: T1, _: T2) {}
}
}
trait Trait {
fn foo(&self);
fn default(&self) {}
}
// We provide an implementation of `Trait` for all types. The corresponding
// monomorphizations should end up in whichever module the concrete `T` is defined in.
impl<T> Trait for T
{
fn foo(&self) {}<|fim▁hole|>mod type1 {
pub struct Struct;
}
mod type2 {
pub struct Struct;
}
//~ MONO_ITEM fn methods_are_with_self_type::start[0]
#[start]
fn start(_: isize, _: *const *const u8) -> isize {
//~ MONO_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[1]::method[0]<u32, u64> @@ methods_are_with_self_type.volatile[WeakODR]
SomeGenericType(0u32, 0u64).method();
//~ MONO_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[1]::associated_fn[0]<char, &str> @@ methods_are_with_self_type.volatile[WeakODR]
SomeGenericType::associated_fn('c', "&str");
//~ MONO_ITEM fn methods_are_with_self_type::{{impl}}[0]::foo[0]<methods_are_with_self_type::type1[0]::Struct[0]> @@ methods_are_with_self_type-type1.volatile[WeakODR]
type1::Struct.foo();
//~ MONO_ITEM fn methods_are_with_self_type::{{impl}}[0]::foo[0]<methods_are_with_self_type::type2[0]::Struct[0]> @@ methods_are_with_self_type-type2.volatile[WeakODR]
type2::Struct.foo();
//~ MONO_ITEM fn methods_are_with_self_type::Trait[0]::default[0]<methods_are_with_self_type::type1[0]::Struct[0]> @@ methods_are_with_self_type-type1.volatile[WeakODR]
type1::Struct.default();
//~ MONO_ITEM fn methods_are_with_self_type::Trait[0]::default[0]<methods_are_with_self_type::type2[0]::Struct[0]> @@ methods_are_with_self_type-type2.volatile[WeakODR]
type2::Struct.default();
0
}
//~ MONO_ITEM drop-glue i8
|