max_stars_count (int64, 301 to 224k) | text (string, lengths 6 to 1.05M) | token_count (int64, 3 to 727k) |
---|---|---|
60,067 | #include <gtest/gtest.h>
#include <torch/csrc/jit/mobile/nnc/registry.h>
namespace torch {
namespace jit {
namespace mobile {
namespace nnc {
extern "C" {
int generated_asm_kernel_foo(void**) {
return 1;
}
int generated_asm_kernel_bar(void**) {
return 2;
}
} // extern "C"
REGISTER_NNC_KERNEL("foo:v1:VERTOKEN", generated_asm_kernel_foo)
REGISTER_NNC_KERNEL("bar:v1:VERTOKEN", generated_asm_kernel_bar)
TEST(MobileNNCRegistryTest, FindAndRun) {
auto foo_kernel = registry::get_nnc_kernel("foo:v1:VERTOKEN");
EXPECT_EQ(foo_kernel->execute(nullptr), 1);
auto bar_kernel = registry::get_nnc_kernel("bar:v1:VERTOKEN");
EXPECT_EQ(bar_kernel->execute(nullptr), 2);
}
TEST(MobileNNCRegistryTest, NoKernel) {
EXPECT_EQ(registry::has_nnc_kernel("missing"), false);
}
} // namespace nnc
} // namespace mobile
} // namespace jit
} // namespace torch
| 344 |
589 | package rocks.inspectit.shared.cs.ci.business.expression;
import java.util.List;
import org.codehaus.jackson.annotate.JsonIgnore;
/**
* This interface provides a common access to {@link AbstractExpression} instances that serve as
* container expression (i.e. expressions that contain other expressions).
*
* @author <NAME>
*
*/
public interface IContainerExpression {
/**
* Adds an operand to this container expression.
*
* @param operand
* {@link AbstractExpression} instance to add as operand
*/
void addOperand(AbstractExpression operand);
/**
* Indicates whether an operand can be added.
*
* @return true, if operand can be added, otherwise false.
*/
boolean canAddOperand();
/**
* Returns the list of operands ({@link AbstractExpression} instances).
*
* @return Returns the list of operands ({@link AbstractExpression} instances).
*/
List<AbstractExpression> getOperands();
/**
* Removes the given operand from this container expression.
*
* @param operand
* {@link AbstractExpression} instance to be removed
*/
void removeOperand(AbstractExpression operand);
/**
 * Returns the number of child expressions currently attached to this expression.
 *
 * @return The number of child expressions currently attached to this expression.
*/
@JsonIgnore
int getNumberOfChildExpressions();
}
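// A hypothetical implementation sketch, for illustration only; "AndExpression" and its
// backing list are assumed names and are not part of this interface:
//
//   public class AndExpression extends AbstractExpression implements IContainerExpression {
//       private final List<AbstractExpression> operands = new ArrayList<>();
//       public void addOperand(AbstractExpression operand) { operands.add(operand); }
//       public boolean canAddOperand() { return true; }
//       public List<AbstractExpression> getOperands() { return operands; }
//       public void removeOperand(AbstractExpression operand) { operands.remove(operand); }
//       public int getNumberOfChildExpressions() { return operands.size(); }
//   }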
| 407 |
4,054 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
//
#pragma once
#include "polymorphicarraybase.h"
namespace vespalib {
/**
 * Describes an interface for an array of polymorphic types.
 * The intention is to allow efficient implementations when that is possible
 * while still enjoying the flexibility of the polymorphic interface.
 * It is not a full-fledged Array implementation like std::vector. It contains just
 * the minimum required to allow for efficient implementations for document::ArrayFieldValue.
 *
 * You specify the base type the interface shall provide. This base type must define
 * virtual void assign(const B & rhs);
 * For use with ComplexArrayT your type also needs
 * virtual T * clone() const;
 */
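//
// A minimal sketch, for illustration only, of a base type that satisfies the contract
// described above. "MyBase" and "MyBaseArray" are assumed names, not part of vespalib:
//
//   struct MyBase {
//       virtual ~MyBase() = default;
//       virtual void assign(const MyBase &rhs) = 0;  // required by IArrayT<MyBase>
//       virtual MyBase *clone() const = 0;           // additionally required for ComplexArrayT
//   };
//   using MyBaseArray = IArrayT<MyBase>;
//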
template<typename B>
class IArrayT : public IArrayBase {
public:
class iterator {
public:
iterator(IArrayT &a, size_t i) : _a(&a), _i(i) {}
iterator operator+(size_t diff) const { return iterator(*_a, _i + diff); }
iterator &operator++() {
++_i;
return *this;
}
iterator operator++(int) {
iterator other(*this);
++_i;
return other;
}
bool operator==(const iterator &other) const { return (_a == other._a) && (_i == other._i); }
bool operator!=(const iterator &other) const { return (_i != other._i) || (_a != other._a); }
B &operator*() { return (*_a)[_i]; }
B *operator->() { return &(*_a)[_i]; }
friend ssize_t operator-(const iterator &a, const iterator &b) { return a._i - b._i; }
private:
IArrayT *_a;
size_t _i;
};
class const_iterator {
public:
const_iterator(const IArrayT &a, size_t i) : _a(&a), _i(i) {}
const_iterator operator+(size_t diff) const { return const_iterator(*_a, _i + diff); }
const_iterator &operator++() {
++_i;
return *this;
}
const_iterator operator++(int) {
const_iterator other(*this);
++_i;
return other;
}
bool operator==(const const_iterator &other) const { return (_a == other._a) && (_i == other._i); }
bool operator!=(const const_iterator &other) const { return (_i != other._i) || (_a != other._a); }
const B &operator*() const { return (*_a)[_i]; }
const B *operator->() const { return &(*_a)[_i]; }
size_t operator-(const const_iterator &b) const { return _i - b._i; }
private:
const IArrayT *_a;
size_t _i;
};
typedef std::unique_ptr<IArrayT> UP;
virtual const B &operator[](size_t i) const = 0;
virtual B &operator[](size_t i) = 0;
virtual IArrayT *clone() const override = 0;
virtual iterator erase(iterator it) = 0;
virtual const_iterator begin() const { return const_iterator(*this, 0); }
virtual const_iterator end() const { return const_iterator(*this, size()); }
virtual iterator begin() { return iterator(*this, 0); }
virtual iterator end() { return iterator(*this, size()); }
virtual void push_back(const B &v) = 0;
};
}
| 1,211 |
745 | #include <iostream>
#include <numeric>
#include <initializer_list>
#include <cstdlib>
#include <stdlib.h>
#include <half.hpp>
#include "config.hpp"
#include "print.hpp"
#include "device.hpp"
#include "host_tensor.hpp"
#include "host_tensor_generator.hpp"
#include "conv_common.hpp"
#include "host_conv_bwd_data.hpp"
#include "device_tensor.hpp"
#include "device_convolution_backward_data_implicit_gemm_v4r1_xdlops_nhwc_kyxc_nhwk.hpp"
#include "device_convolution_backward_data_implicit_gemm_v4r1r2_xdlops_nhwc_kyxc_nhwk.hpp"
#define USE_MODE 1
#define USE_CONV_BWD_V4R1_XDL_NHWC 1
#define USE_CONV_BWD_V4R1R2_XDL_NHWC 1
enum ConvBackwardDataAlgo
{
V4R1XDLNHWC,
V4R1R2XDLNHWC,
};
int main(int argc, char* argv[])
{
using namespace ck;
constexpr auto I0 = Number<0>{};
constexpr auto I1 = Number<1>{};
constexpr auto I2 = Number<2>{};
constexpr auto I3 = Number<3>{};
constexpr auto I4 = Number<4>{};
constexpr auto I5 = Number<5>{};
constexpr auto I6 = Number<6>{};
#if USE_MODE
// dynamic mode
if(argc != 22)
{
printf("arg1 to 5: layout, algo, do_verification, init_method, do_log, nrepeat\n");
printf("rest: N, K, C, Y, X, Hi, Wi, Sy, Sx, Dy, Dx, LeftPy, LeftPx, RightPy, RightPx\n");
exit(1);
}
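// Example invocation in dynamic mode (the program name and the numeric layout/algo codes
// are assumptions for illustration; the shape values mirror the static-mode defaults below):
//   ./conv_bwd_data_xdl 1 1 1 4 0 5  128 256 192 3 3 71 71  2 2  1 1  1 1  1 1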
const ConvTensorLayout layout = static_cast<ConvTensorLayout>(std::stoi(argv[1]));
const ConvBackwardDataAlgo algo = static_cast<ConvBackwardDataAlgo>(std::stoi(argv[2]));
const bool do_verification = std::stoi(argv[3]);
const int init_method = std::stoi(argv[4]);
const bool do_log = std::stoi(argv[5]);
const int nrepeat = std::stoi(argv[6]);
const index_t N = std::stoi(argv[7]);
const index_t K = std::stoi(argv[8]);
const index_t C = std::stoi(argv[9]);
const index_t Y = std::stoi(argv[10]);
const index_t X = std::stoi(argv[11]);
const index_t Hi = std::stoi(argv[12]);
const index_t Wi = std::stoi(argv[13]);
const index_t conv_stride_h = std::stoi(argv[14]);
const index_t conv_stride_w = std::stoi(argv[15]);
const index_t conv_dilation_h = std::stoi(argv[16]);
const index_t conv_dilation_w = std::stoi(argv[17]);
const index_t in_left_pad_h = std::stoi(argv[18]);
const index_t in_left_pad_w = std::stoi(argv[19]);
const index_t in_right_pad_h = std::stoi(argv[20]);
const index_t in_right_pad_w = std::stoi(argv[21]);
const index_t YEff = (Y - 1) * conv_dilation_h + 1;
const index_t XEff = (X - 1) * conv_dilation_w + 1;
const index_t Ho = (Hi + in_left_pad_h + in_right_pad_h - YEff) / conv_stride_h + 1;
const index_t Wo = (Wi + in_left_pad_w + in_right_pad_w - XEff) / conv_stride_w + 1;
#else
// static mode
if(argc < 7)
{
printf("arg1 to 5: layout, algo, do_verification, init_method, do_log, nrepeat\n");
exit(1);
}
const ConvTensorLayout layout = static_cast<ConvTensorLayout>(std::stoi(argv[1]));
const ConvBackwardDataAlgo algo = static_cast<ConvBackwardDataAlgo>(std::stoi(argv[2]));
const bool do_verification = std::stoi(argv[3]);
const int init_method = std::stoi(argv[4]);
const bool do_log = std::stoi(argv[5]);
const int nrepeat = std::stoi(argv[6]);
constexpr index_t N = 128;
constexpr index_t C = 192;
constexpr index_t Hi = 71;
constexpr index_t Wi = 71;
constexpr index_t K = 256;
constexpr index_t Y = 3;
constexpr index_t X = 3;
const index_t conv_stride_h = 2;
const index_t conv_stride_w = 2;
const index_t conv_dilation_h = 1;
const index_t conv_dilation_w = 1;
const index_t in_left_pad_h = 1;
const index_t in_left_pad_w = 1;
const index_t in_right_pad_h = 1;
const index_t in_right_pad_w = 1;
const index_t YEff = (Y - 1) * conv_dilation_h + 1;
const index_t XEff = (X - 1) * conv_dilation_w + 1;
const index_t Ho = (Hi + in_left_pad_h + in_right_pad_h - YEff) / conv_stride_h + 1;
const index_t Wo = (Wi + in_left_pad_w + in_right_pad_w - XEff) / conv_stride_w + 1;
#endif
#if 0
using in_data_t = float;
using acc_data_t = float;
using out_data_t = float;
#elif 1
using in_data_t = half_t;
using acc_data_t = float;
using out_data_t = half_t;
#endif
std::vector<std::size_t> in_lengths_host(4), wei_lengths_host(4), out_lengths_host(4);
if(layout == ConvTensorLayout::NCHW)
{
in_lengths_host[0] = static_cast<std::size_t>(N);
in_lengths_host[1] = static_cast<std::size_t>(C);
in_lengths_host[2] = static_cast<std::size_t>(Hi);
in_lengths_host[3] = static_cast<std::size_t>(Wi);
wei_lengths_host[0] = static_cast<std::size_t>(K);
wei_lengths_host[1] = static_cast<std::size_t>(C);
wei_lengths_host[2] = static_cast<std::size_t>(Y);
wei_lengths_host[3] = static_cast<std::size_t>(X);
out_lengths_host[0] = static_cast<std::size_t>(N);
out_lengths_host[1] = static_cast<std::size_t>(K);
out_lengths_host[2] = static_cast<std::size_t>(Ho);
out_lengths_host[3] = static_cast<std::size_t>(Wo);
}
else if(layout == ConvTensorLayout::NHWC)
{
in_lengths_host[0] = static_cast<std::size_t>(N);
in_lengths_host[1] = static_cast<std::size_t>(Hi);
in_lengths_host[2] = static_cast<std::size_t>(Wi);
in_lengths_host[3] = static_cast<std::size_t>(C);
wei_lengths_host[0] = static_cast<std::size_t>(K);
wei_lengths_host[1] = static_cast<std::size_t>(Y);
wei_lengths_host[2] = static_cast<std::size_t>(X);
wei_lengths_host[3] = static_cast<std::size_t>(C);
out_lengths_host[0] = static_cast<std::size_t>(N);
out_lengths_host[1] = static_cast<std::size_t>(Ho);
out_lengths_host[2] = static_cast<std::size_t>(Wo);
out_lengths_host[3] = static_cast<std::size_t>(K);
}
else
{
throw std::runtime_error("wrong! not implemented");
}
Tensor<in_data_t> in_host(in_lengths_host);
Tensor<in_data_t> in_device(in_lengths_host);
Tensor<in_data_t> wei(wei_lengths_host);
Tensor<out_data_t> out(out_lengths_host);
std::cout << "layout: " << layout << std::endl;
ostream_HostTensorDescriptor(in_host.mDesc, std::cout << "in: ");
ostream_HostTensorDescriptor(wei.mDesc, std::cout << "wei: ");
ostream_HostTensorDescriptor(out.mDesc, std::cout << "out: ");
print_array("InLeftPads", make_tuple(in_left_pad_h, in_left_pad_w));
print_array("InRightPads", make_tuple(in_right_pad_h, in_right_pad_w));
print_array("ConvStrides", make_tuple(conv_stride_h, conv_stride_w));
print_array("ConvDilations", make_tuple(conv_dilation_h, conv_dilation_w));
std::size_t num_thread = std::thread::hardware_concurrency();
switch(init_method)
{
case 0:
// no initialization
break;
case 1:
out.GenerateTensorValue(GeneratorTensor_1{}, num_thread);
wei.GenerateTensorValue(GeneratorTensor_1{}, num_thread);
break;
case 2:
out.GenerateTensorValue(GeneratorTensor_1{}, num_thread);
wei.GenerateTensorValue(GeneratorTensor_2{-5, 5}, num_thread);
break;
case 3:
out.GenerateTensorValue(GeneratorTensor_2{-5, 5}, num_thread);
wei.GenerateTensorValue(GeneratorTensor_1{}, num_thread);
break;
case 4:
out.GenerateTensorValue(GeneratorTensor_2{-5, 5}, num_thread);
wei.GenerateTensorValue(GeneratorTensor_2{-5, 5}, num_thread);
break;
case 5:
out.GenerateTensorValue(GeneratorTensor_3<float>{0.0, 1.0}, num_thread);
wei.GenerateTensorValue(GeneratorTensor_3<float>{-0.5, 0.5}, num_thread);
break;
default:
out.GenerateTensorValue(GeneratorTensor_2{1, 5}, num_thread);
auto gen_wei = [](auto... is) {
return GeneratorTensor_2{1, 5}(is...) * GeneratorTensor_Checkboard{}(is...);
};
wei.GenerateTensorValue(gen_wei, num_thread);
}
auto f_make_for_device_nhwc = [&]() {
#if USE_MODE
const auto in_lengths_dev = make_tuple(N, Hi, Wi, C);
const auto wei_lengths_dev = make_tuple(K, Y, X, C);
const auto out_lengths_dev = make_tuple(N, Ho, Wo, K);
const auto conv_strides_dev = make_tuple(conv_stride_h, conv_stride_w);
const auto conv_dilations_dev = make_tuple(conv_dilation_h, conv_dilation_w);
const auto in_left_pads_dev = make_tuple(in_left_pad_h, in_left_pad_w);
const auto in_right_pads_dev = make_tuple(in_right_pad_h, in_right_pad_w);
#else
const auto in_lengths_dev =
make_tuple(Number<N>{}, Number<Hi>{}, Number<Wi>{}, Number<C>{});
const auto wei_lengths_dev = make_tuple(Number<K>{}, Number<Y>{}, Number<X>{}, Number<C>{});
const auto out_lengths_dev =
make_tuple(Number<N>{}, Number<Ho>{}, Number<Wo>{}, Number<K>{});
const auto conv_strides_dev = make_tuple(Number<conv_stride_h>{}, Number<conv_stride_w>{});
const auto conv_dilations_dev =
make_tuple(Number<conv_dilation_h>{}, Number<conv_dilation_w>{});
const auto in_left_pads_dev = make_tuple(Number<in_left_pad_h>{}, Number<in_left_pad_w>{});
const auto in_right_pads_dev =
make_tuple(Number<in_right_pad_h>{}, Number<in_right_pad_w>{});
#endif
return make_tuple(in_lengths_dev,
wei_lengths_dev,
out_lengths_dev,
conv_strides_dev,
conv_dilations_dev,
in_left_pads_dev,
in_right_pads_dev);
};
#if USE_CONV_BWD_V4R1_XDL_NHWC
if(algo == ConvBackwardDataAlgo::V4R1XDLNHWC)
{
if(layout != ConvTensorLayout::NHWC)
{
throw std::runtime_error("wrong! layout");
}
const auto tmp = f_make_for_device_nhwc();
device_convolution_backward_data_implicit_gemm_v4r1_xdlops_nhwc_kyxc_nhwk<in_data_t,
acc_data_t,
out_data_t>(
tmp[I0],
tmp[I1],
tmp[I2],
tmp[I3],
tmp[I4],
tmp[I5],
tmp[I6],
in_device,
wei,
out,
nrepeat);
}
#endif
#if USE_CONV_BWD_V4R1R2_XDL_NHWC
if(algo == ConvBackwardDataAlgo::V4R1R2XDLNHWC)
{
if(layout != ConvTensorLayout::NHWC)
{
throw std::runtime_error("wrong! layout");
}
const auto tmp = f_make_for_device_nhwc();
device_convolution_backward_data_implicit_gemm_v4r1r2_xdlops_nhwc_kyxc_nhwk<in_data_t,
acc_data_t,
out_data_t>(
tmp[I0],
tmp[I1],
tmp[I2],
tmp[I3],
tmp[I4],
tmp[I5],
tmp[I6],
in_device,
wei,
out,
nrepeat);
}
#endif
if(do_verification)
{
host_direct_convolution_backward_data(in_host,
wei,
out,
make_tuple(conv_stride_h, conv_stride_w),
make_tuple(conv_dilation_h, conv_dilation_w),
make_tuple(in_left_pad_h, in_left_pad_w),
make_tuple(in_right_pad_h, in_right_pad_w),
layout);
check_error(in_host, in_device);
if(do_log)
{
LogRangeAsType<float>(std::cout << "out : ", out.mData, ",") << std::endl;
LogRangeAsType<float>(std::cout << "wei: ", wei.mData, ",") << std::endl;
LogRangeAsType<float>(std::cout << "in_host : ", in_host.mData, ",") << std::endl;
LogRangeAsType<float>(std::cout << "in_device: ", in_device.mData, ",") << std::endl;
}
}
}
| 6,604 |
575 | <filename>third_party/blink/renderer/core/testing/sim/sim_test.cc<gh_stars>100-1000
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/core/testing/sim/sim_test.h"
#include "base/run_loop.h"
#include "content/test/test_blink_web_unit_test_support.h"
#include "third_party/blink/public/platform/web_cache.h"
#include "third_party/blink/public/web/web_navigation_params.h"
#include "third_party/blink/renderer/core/dom/document.h"
#include "third_party/blink/renderer/core/exported/web_view_impl.h"
#include "third_party/blink/renderer/core/frame/local_dom_window.h"
#include "third_party/blink/renderer/core/frame/web_local_frame_impl.h"
#include "third_party/blink/renderer/core/loader/document_loader.h"
#include "third_party/blink/renderer/core/scroll/scrollbar_theme.h"
#include "third_party/blink/renderer/platform/testing/unit_test_helpers.h"
namespace blink {
SimTest::SimTest() {
Document::SetThreadedParsingEnabledForTesting(false);
// Threaded animations are usually enabled for blink. However, these tests use
// synchronous compositing, which cannot run threaded animations.
bool was_threaded_animation_enabled =
content::TestBlinkWebUnitTestSupport::SetThreadedAnimationEnabled(false);
// If this fails, we'd be resetting IsThreadedAnimationEnabled() to the wrong
// thing in the destructor.
DCHECK(was_threaded_animation_enabled);
}
SimTest::~SimTest() {
Document::SetThreadedParsingEnabledForTesting(true);
content::TestBlinkWebUnitTestSupport::SetThreadedAnimationEnabled(true);
WebCache::Clear();
}
void SimTest::SetUp() {
Test::SetUp();
// SimCompositor overrides the LayerTreeViewDelegate to respond to
// BeginMainFrame(), which will update and paint the main frame of the
// WebViewImpl given to SetWebView().
network_ = std::make_unique<SimNetwork>();
compositor_ = std::make_unique<SimCompositor>();
web_frame_client_ =
std::make_unique<frame_test_helpers::TestWebFrameClient>();
web_view_client_ = std::make_unique<frame_test_helpers::TestWebViewClient>();
page_ = std::make_unique<SimPage>();
web_view_helper_ =
std::make_unique<frame_test_helpers::WebViewHelper>(base::BindRepeating(
&SimTest::CreateTestWebFrameWidget, base::Unretained(this)));
web_view_helper_->Initialize(web_frame_client_.get(), web_view_client_.get());
compositor_->SetWebView(WebView(), *web_view_client_);
page_->SetPage(WebView().GetPage());
local_frame_root_ = WebView().MainFrameImpl();
compositor_->SetLayerTreeHost(
local_frame_root_->FrameWidgetImpl()->LayerTreeHostForTesting());
WebView().MainFrameViewWidget()->Resize(gfx::Size(300, 200));
}
void SimTest::TearDown() {
// Pump the message loop to process the load event.
//
// Use RunUntilIdle() instead of blink::test::RunPendingTask(), because
// blink::test::RunPendingTask() posts directly to
// Thread::Current()->GetTaskRunner(), which makes it incompatible with a
// TestingPlatformSupportWithMockScheduler.
base::RunLoop().RunUntilIdle();
// Shut down this stuff before settings change to keep the world
// consistent, and before the subclass tears down.
web_view_helper_.reset();
page_.reset();
web_view_client_.reset();
web_frame_client_.reset();
compositor_.reset();
network_.reset();
local_frame_root_ = nullptr;
base::RunLoop().RunUntilIdle();
}
void SimTest::InitializeRemote() {
web_view_helper_->InitializeRemote();
compositor_->SetWebView(WebView(), *web_view_client_);
page_->SetPage(WebView().GetPage());
web_frame_client_ =
std::make_unique<frame_test_helpers::TestWebFrameClient>();
local_frame_root_ = web_view_helper_->CreateLocalChild(
*WebView().MainFrame()->ToWebRemoteFrame(), "local_frame_root",
WebFrameOwnerProperties(), nullptr, web_frame_client_.get());
compositor_->SetLayerTreeHost(
local_frame_root_->FrameWidgetImpl()->LayerTreeHostForTesting());
}
void SimTest::LoadURL(const String& url_string) {
KURL url(url_string);
frame_test_helpers::LoadFrameDontWait(local_frame_root_.Get(), url);
if (DocumentLoader::WillLoadUrlAsEmpty(url) || url.ProtocolIsData()) {
// Empty documents and data URLs do not use mocked-out SimRequests;
// they load their data directly instead.
frame_test_helpers::PumpPendingRequestsForFrameToLoad(
local_frame_root_.Get());
}
}
LocalDOMWindow& SimTest::Window() {
return *GetDocument().domWindow();
}
SimPage& SimTest::GetPage() {
return *page_;
}
Document& SimTest::GetDocument() {
return *WebView().MainFrameImpl()->GetFrame()->GetDocument();
}
WebViewImpl& SimTest::WebView() {
return *web_view_helper_->GetWebView();
}
WebLocalFrameImpl& SimTest::MainFrame() {
return *WebView().MainFrameImpl();
}
WebLocalFrameImpl& SimTest::LocalFrameRoot() {
return *local_frame_root_;
}
frame_test_helpers::TestWebViewClient& SimTest::WebViewClient() {
return *web_view_client_;
}
frame_test_helpers::TestWebFrameClient& SimTest::WebFrameClient() {
return *web_frame_client_;
}
SimWebFrameWidget& SimTest::GetWebFrameWidget() {
return *static_cast<SimWebFrameWidget*>(local_frame_root_->FrameWidgetImpl());
}
SimCompositor& SimTest::Compositor() {
return *compositor_;
}
Vector<String>& SimTest::ConsoleMessages() {
return web_frame_client_->ConsoleMessages();
}
SimWebFrameWidget* SimTest::CreateSimWebFrameWidget(
base::PassKey<WebLocalFrame> pass_key,
CrossVariantMojoAssociatedRemote<mojom::blink::FrameWidgetHostInterfaceBase>
frame_widget_host,
CrossVariantMojoAssociatedReceiver<mojom::blink::FrameWidgetInterfaceBase>
frame_widget,
CrossVariantMojoAssociatedRemote<mojom::blink::WidgetHostInterfaceBase>
widget_host,
CrossVariantMojoAssociatedReceiver<mojom::blink::WidgetInterfaceBase>
widget,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
const viz::FrameSinkId& frame_sink_id,
bool hidden,
bool never_composited,
bool is_for_child_local_root,
bool is_for_nested_main_frame,
SimCompositor* compositor) {
return MakeGarbageCollected<SimWebFrameWidget>(
compositor, std::move(pass_key), std::move(frame_widget_host),
std::move(frame_widget), std::move(widget_host), std::move(widget),
std::move(task_runner), frame_sink_id, hidden, never_composited,
is_for_child_local_root, is_for_nested_main_frame);
}
frame_test_helpers::TestWebFrameWidget* SimTest::CreateTestWebFrameWidget(
base::PassKey<WebLocalFrame> pass_key,
CrossVariantMojoAssociatedRemote<mojom::blink::FrameWidgetHostInterfaceBase>
frame_widget_host,
CrossVariantMojoAssociatedReceiver<mojom::blink::FrameWidgetInterfaceBase>
frame_widget,
CrossVariantMojoAssociatedRemote<mojom::blink::WidgetHostInterfaceBase>
widget_host,
CrossVariantMojoAssociatedReceiver<mojom::blink::WidgetInterfaceBase>
widget,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
const viz::FrameSinkId& frame_sink_id,
bool hidden,
bool never_composited,
bool is_for_child_local_root,
bool is_for_nested_main_frame) {
return CreateSimWebFrameWidget(
std::move(pass_key), std::move(frame_widget_host),
std::move(frame_widget), std::move(widget_host), std::move(widget),
std::move(task_runner), frame_sink_id, hidden, never_composited,
is_for_child_local_root, is_for_nested_main_frame, compositor_.get());
}
} // namespace blink
| 2,651 |
812 | <filename>platforms/android/FacebookLib/src/com/facebook/Settings.java
/**
* Copyright 2010-present Facebook.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook;
import android.content.ContentResolver;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.Signature;
import android.database.Cursor;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.Looper;
import android.util.Base64;
import android.util.Log;
import com.facebook.android.BuildConfig;
import com.facebook.internal.AttributionIdentifiers;
import com.facebook.internal.Utility;
import com.facebook.internal.Validate;
import com.facebook.model.GraphObject;
import org.json.JSONException;
import org.json.JSONObject;
import java.lang.reflect.Field;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
/**
* Allows some customization of sdk behavior.
*/
public final class Settings {
private static final String TAG = Settings.class.getCanonicalName();
private static final HashSet<LoggingBehavior> loggingBehaviors =
new HashSet<LoggingBehavior>(Arrays.asList(LoggingBehavior.DEVELOPER_ERRORS));
private static volatile Executor executor;
private static volatile boolean shouldAutoPublishInstall;
private static volatile String appVersion;
private static volatile String applicationId;
private static volatile String appClientToken;
private static volatile boolean defaultsLoaded = false;
private static final String FACEBOOK_COM = "facebook.com";
private static volatile String facebookDomain = FACEBOOK_COM;
private static AtomicLong onProgressThreshold = new AtomicLong(65536);
private static volatile boolean platformCompatibilityEnabled;
private static volatile boolean isDebugEnabled = BuildConfig.DEBUG;
private static final int DEFAULT_CORE_POOL_SIZE = 5;
private static final int DEFAULT_MAXIMUM_POOL_SIZE = 128;
private static final int DEFAULT_KEEP_ALIVE = 1;
private static final Object LOCK = new Object();
private static final Uri ATTRIBUTION_ID_CONTENT_URI =
Uri.parse("content://com.facebook.katana.provider.AttributionIdProvider");
private static final String ATTRIBUTION_ID_COLUMN_NAME = "aid";
private static final String ATTRIBUTION_PREFERENCES = "com.facebook.sdk.attributionTracking";
private static final String PUBLISH_ACTIVITY_PATH = "%s/activities";
private static final String MOBILE_INSTALL_EVENT = "MOBILE_APP_INSTALL";
private static final String ANALYTICS_EVENT = "event";
private static final String AUTO_PUBLISH = "auto_publish";
private static final String APP_EVENT_PREFERENCES = "com.facebook.sdk.appEventPreferences";
private static final BlockingQueue<Runnable> DEFAULT_WORK_QUEUE = new LinkedBlockingQueue<Runnable>(10);
private static final ThreadFactory DEFAULT_THREAD_FACTORY = new ThreadFactory() {
private final AtomicInteger counter = new AtomicInteger(0);
public Thread newThread(Runnable runnable) {
return new Thread(runnable, "FacebookSdk #" + counter.incrementAndGet());
}
};
/**
* loadDefaultsFromMetadata will attempt to load certain settings (e.g., application ID, client token) from
* metadata in the app's AndroidManifest.xml. The application ID will be read from this key.
*/
public static final String APPLICATION_ID_PROPERTY = "com.facebook.sdk.ApplicationId";
/**
* loadDefaultsFromMetadata will attempt to load certain settings (e.g., application ID, client token) from
* metadata in the app's AndroidManifest.xml. The client token will be read from this key.
*/
public static final String CLIENT_TOKEN_PROPERTY = "com.facebook.sdk.ClientToken";
private static Boolean sdkInitialized = false;
/**
 * Initialize SDK.
 * This function should be called once per application, as early as possible;
 * this is the place to register broadcast listeners.
*/
public static synchronized void sdkInitialize(Context context) {
if (sdkInitialized == true) {
return;
}
// Make sure we've loaded default settings if we haven't already.
Settings.loadDefaultsFromMetadataIfNeeded(context);
// Load app settings from network so that dialog configs are available
Utility.loadAppSettingsAsync(context, Settings.getApplicationId());
BoltsMeasurementEventListener.getInstance(context.getApplicationContext());
sdkInitialized = true;
}
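// Typical usage, shown for illustration (e.g. from an Application or Activity onCreate()):
//   Settings.sdkInitialize(getApplicationContext());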
/**
* Certain logging behaviors are available for debugging beyond those that should be
* enabled in production.
*
* Returns the types of extended logging that are currently enabled.
*
* @return a set containing enabled logging behaviors
*/
public static final Set<LoggingBehavior> getLoggingBehaviors() {
synchronized (loggingBehaviors) {
return Collections.unmodifiableSet(new HashSet<LoggingBehavior>(loggingBehaviors));
}
}
/**
* Certain logging behaviors are available for debugging beyond those that should be
* enabled in production.
*
* Enables a particular extended logging in the sdk.
*
* @param behavior
* The LoggingBehavior to enable
*/
public static final void addLoggingBehavior(LoggingBehavior behavior) {
synchronized (loggingBehaviors) {
loggingBehaviors.add(behavior);
}
}
/**
* Certain logging behaviors are available for debugging beyond those that should be
* enabled in production.
*
* Disables a particular extended logging behavior in the sdk.
*
* @param behavior
* The LoggingBehavior to disable
*/
public static final void removeLoggingBehavior(LoggingBehavior behavior) {
synchronized (loggingBehaviors) {
loggingBehaviors.remove(behavior);
}
}
/**
* Certain logging behaviors are available for debugging beyond those that should be
* enabled in production.
*
* Disables all extended logging behaviors.
*/
public static final void clearLoggingBehaviors() {
synchronized (loggingBehaviors) {
loggingBehaviors.clear();
}
}
/**
* Certain logging behaviors are available for debugging beyond those that should be
* enabled in production.
*
* Checks if a particular extended logging behavior is enabled.
*
* @param behavior
* The LoggingBehavior to check
* @return whether behavior is enabled
*/
public static final boolean isLoggingBehaviorEnabled(LoggingBehavior behavior) {
synchronized (loggingBehaviors) {
return Settings.isDebugEnabled() && loggingBehaviors.contains(behavior);
}
}
/**
* This method is deprecated. Use {@link Settings#isDebugEnabled()} instead.
*/
@Deprecated
public static final boolean isLoggingEnabled() {
return isDebugEnabled();
}
/**
* This method is deprecated. Use {@link Settings#setIsDebugEnabled(boolean)} instead.
*/
@Deprecated
public static final void setIsLoggingEnabled(boolean enabled) {
setIsDebugEnabled(enabled);
}
/**
* Indicates if we are in debug mode.
*/
public static final boolean isDebugEnabled() {
return isDebugEnabled;
}
/**
* Used to enable or disable logging, and other debug features. Defaults to BuildConfig.DEBUG.
* @param enabled Debug features (like logging) are enabled if true, disabled if false.
*/
public static final void setIsDebugEnabled(boolean enabled) {
isDebugEnabled = enabled;
}
/**
* Returns the Executor used by the SDK for non-AsyncTask background work.
*
* By default this uses AsyncTask Executor via reflection if the API level is high enough.
* Otherwise this creates a new Executor with defaults similar to those used in AsyncTask.
*
* @return an Executor used by the SDK. This will never be null.
*/
public static Executor getExecutor() {
synchronized (LOCK) {
if (Settings.executor == null) {
Executor executor = getAsyncTaskExecutor();
if (executor == null) {
executor = new ThreadPoolExecutor(DEFAULT_CORE_POOL_SIZE, DEFAULT_MAXIMUM_POOL_SIZE,
DEFAULT_KEEP_ALIVE, TimeUnit.SECONDS, DEFAULT_WORK_QUEUE, DEFAULT_THREAD_FACTORY);
}
Settings.executor = executor;
}
}
return Settings.executor;
}
/**
* Sets the Executor used by the SDK for non-AsyncTask background work.
*
* @param executor
* the Executor to use; must not be null.
*/
public static void setExecutor(Executor executor) {
Validate.notNull(executor, "executor");
synchronized (LOCK) {
Settings.executor = executor;
}
}
/**
* Gets the base Facebook domain to use when making Web requests; in production code this will always be
* "facebook.com".
*
* @return the Facebook domain
*/
public static String getFacebookDomain() {
return facebookDomain;
}
/**
* Sets the base Facebook domain to use when making Web requests. This defaults to "facebook.com", but may
* be overridden to, e.g., "beta.facebook.com" to direct requests at a different domain. This method should
* never be called from production code.
*
* @param facebookDomain the base domain to use instead of "facebook.com"
*/
public static void setFacebookDomain(String facebookDomain) {
if (!BuildConfig.DEBUG) {
Log.w(TAG, "WARNING: Calling setFacebookDomain from non-DEBUG code.");
}
Settings.facebookDomain = facebookDomain;
}
private static Executor getAsyncTaskExecutor() {
Field executorField = null;
try {
executorField = AsyncTask.class.getField("THREAD_POOL_EXECUTOR");
} catch (NoSuchFieldException e) {
return null;
}
Object executorObject = null;
try {
executorObject = executorField.get(null);
} catch (IllegalAccessException e) {
return null;
}
if (executorObject == null) {
return null;
}
if (!(executorObject instanceof Executor)) {
return null;
}
return (Executor) executorObject;
}
static void publishInstallAsync(final Context context, final String applicationId,
final Request.Callback callback) {
// grab the application context ahead of time, since we will return to the caller immediately.
final Context applicationContext = context.getApplicationContext();
Settings.getExecutor().execute(new Runnable() {
@Override
public void run() {
final Response response = Settings.publishInstallAndWaitForResponse(applicationContext, applicationId, false);
if (callback != null) {
// invoke the callback on the main thread.
Handler handler = new Handler(Looper.getMainLooper());
handler.post(new Runnable() {
@Override
public void run() {
callback.onCompleted(response);
}
});
}
}
});
}
/**
* Sets whether opening a Session should automatically publish install attribution to the Facebook graph.
*
* @param shouldAutoPublishInstall true to automatically publish, false to not
*
* This method is deprecated. See {@link AppEventsLogger#activateApp(Context, String)} for more info.
*/
@Deprecated
public static void setShouldAutoPublishInstall(boolean shouldAutoPublishInstall) {
Settings.shouldAutoPublishInstall = shouldAutoPublishInstall;
}
/**
* Gets whether opening a Session should automatically publish install attribution to the Facebook graph.
*
* @return true to automatically publish, false to not
*
* This method is deprecated. See {@link AppEventsLogger#activateApp(Context, String)} for more info.
*/
@Deprecated
public static boolean getShouldAutoPublishInstall() {
return shouldAutoPublishInstall;
}
static Response publishInstallAndWaitForResponse(
final Context context,
final String applicationId,
final boolean isAutoPublish) {
try {
if (context == null || applicationId == null) {
throw new IllegalArgumentException("Both context and applicationId must be non-null");
}
AttributionIdentifiers identifiers = AttributionIdentifiers.getAttributionIdentifiers(context);
SharedPreferences preferences = context.getSharedPreferences(ATTRIBUTION_PREFERENCES, Context.MODE_PRIVATE);
String pingKey = applicationId+"ping";
String jsonKey = applicationId+"json";
long lastPing = preferences.getLong(pingKey, 0);
String lastResponseJSON = preferences.getString(jsonKey, null);
// prevent auto publish from occurring if we have an explicit call.
if (!isAutoPublish) {
setShouldAutoPublishInstall(false);
}
GraphObject publishParams = GraphObject.Factory.create();
publishParams.setProperty(ANALYTICS_EVENT, MOBILE_INSTALL_EVENT);
Utility.setAppEventAttributionParameters(publishParams,
identifiers,
Utility.getHashedDeviceAndAppID(context, applicationId),
getLimitEventAndDataUsage(context));
publishParams.setProperty(AUTO_PUBLISH, isAutoPublish);
publishParams.setProperty("application_package_name", context.getPackageName());
String publishUrl = String.format(PUBLISH_ACTIVITY_PATH, applicationId);
Request publishRequest = Request.newPostRequest(null, publishUrl, publishParams, null);
if (lastPing != 0) {
GraphObject graphObject = null;
try {
if (lastResponseJSON != null) {
graphObject = GraphObject.Factory.create(new JSONObject(lastResponseJSON));
}
}
catch (JSONException je) {
// return the default graph object if there is any problem reading the data.
}
if (graphObject == null) {
return Response.createResponsesFromString("true", null, new RequestBatch(publishRequest), true).get(0);
} else {
return new Response(null, null, null, graphObject, true);
}
} else if (identifiers == null ||
(identifiers.getAndroidAdvertiserId() == null && identifiers.getAttributionId() == null)) {
throw new FacebookException("No attribution id available to send to server.");
} else {
if (!Utility.queryAppSettings(applicationId, false).supportsAttribution()) {
throw new FacebookException("Install attribution has been disabled on the server.");
}
Response publishResponse = publishRequest.executeAndWait();
// denote success since no error threw from the post.
SharedPreferences.Editor editor = preferences.edit();
lastPing = System.currentTimeMillis();
editor.putLong(pingKey, lastPing);
// if we got an object response back, cache the string of the JSON.
if (publishResponse.getGraphObject() != null &&
publishResponse.getGraphObject().getInnerJSONObject() != null) {
editor.putString(jsonKey, publishResponse.getGraphObject().getInnerJSONObject().toString());
}
editor.apply();
return publishResponse;
}
} catch (Exception e) {
// if there was an error, fall through to the failure case.
Utility.logd("Facebook-publish", e);
return new Response(null, null, new FacebookRequestError(null, e));
}
}
/**
* Acquire the current attribution id from the facebook app.
* @return returns null if the facebook app is not present on the phone.
*/
public static String getAttributionId(ContentResolver contentResolver) {
Cursor c = null;
try {
String [] projection = {ATTRIBUTION_ID_COLUMN_NAME};
c = contentResolver.query(ATTRIBUTION_ID_CONTENT_URI, projection, null, null, null);
if (c == null || !c.moveToFirst()) {
return null;
}
String attributionId = c.getString(c.getColumnIndex(ATTRIBUTION_ID_COLUMN_NAME));
return attributionId;
} catch (Exception e) {
Log.d(TAG, "Caught unexpected exception in getAttributionId(): " + e.toString());
return null;
} finally {
if (c != null) {
c.close();
}
}
}
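// Illustrative usage; assumes a Context named "context" is in scope:
//   String attributionId = Settings.getAttributionId(context.getContentResolver());
//   // attributionId is null when the Facebook app is not installed on the device.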
/**
 * Gets the application version previously set via {@link Settings#setAppVersion(String)}.
 * @return the application version, or null if none has been set.
*/
public static String getAppVersion() {
return appVersion;
}
/**
 * Sets the application version to the provided string. AppEventsLogger.logEvent calls log their events with the
* current app version, and App Insights allows breakdown of events by app version.
*
* @param appVersion The version identifier of the Android app that events are being logged through.
* Enables analysis and breakdown of logged events by app version.
*/
public static void setAppVersion(String appVersion) {
Settings.appVersion = appVersion;
}
/**
* Gets the current version of the Facebook SDK for Android as a string.
*
* @return the current version of the SDK
*/
public static String getSdkVersion() {
return FacebookSdkVersion.BUILD;
}
/**
* Gets whether data such as that generated through AppEventsLogger and sent to Facebook should be restricted from
* being used for purposes other than analytics and conversions, such as for targeting ads to this user. Defaults
* to false. This value is stored on the device and persists across app launches.
*
 * @param context Used to read the value.
 * @return true if event and data usage should be limited, false otherwise.
 */
public static boolean getLimitEventAndDataUsage(Context context) {
SharedPreferences preferences = context.getSharedPreferences(APP_EVENT_PREFERENCES, Context.MODE_PRIVATE);
return preferences.getBoolean("limitEventUsage", false);
}
/**
* Sets whether data such as that generated through AppEventsLogger and sent to Facebook should be restricted from
* being used for purposes other than analytics and conversions, such as for targeting ads to this user. Defaults
* to false. This value is stored on the device and persists across app launches. Changes to this setting will
* apply to app events currently queued to be flushed.
*
 * @param context Used to persist this value across app runs.
 * @param limitEventUsage true to restrict event and data usage to analytics and conversions only.
 */
public static void setLimitEventAndDataUsage(Context context, boolean limitEventUsage) {
context.getSharedPreferences(APP_EVENT_PREFERENCES, Context.MODE_PRIVATE)
.edit()
.putBoolean("limitEventUsage", limitEventUsage)
.apply();
}
/**
* Gets the threshold used to report progress on requests.
*/
public static long getOnProgressThreshold() {
return onProgressThreshold.get();
}
/**
* Sets the threshold used to report progress on requests. Note that the value will be read when the
* request is started and can not be changed during a request (or batch) execution.
*
* @param threshold The number of bytes progressed to force a callback.
*/
public static void setOnProgressThreshold(long threshold) {
onProgressThreshold.set(threshold);
}
/**
* Gets whether the SDK is running in Platform Compatibility mode (i.e. making calls to v1.0 endpoints by default)
* The default is false.
*
* @return the value
*/
public static boolean getPlatformCompatibilityEnabled() {
return platformCompatibilityEnabled;
}
/**
* Sets whether the SDK is running in Platform Compatibility mode (i.e. making calls to v1.0 endpoints by default)
* The default is false. This is provided for apps that have strong reason not to take advantage of new
* capabilities in version 2.0+ of the API.
*
* @param platformCompatibilityEnabled whether to set Legacy Graph API mode
*/
public static void setPlatformCompatibilityEnabled(boolean platformCompatibilityEnabled) {
Settings.platformCompatibilityEnabled = platformCompatibilityEnabled;
}
/**
* Loads default values for certain settings from an application's AndroidManifest.xml metadata, if possible.
* If values have been explicitly set for a particular setting, they will not be overwritten. The following
* settings are currently loaded from metadata: APPLICATION_ID_PROPERTY, CLIENT_TOKEN_PROPERTY
* @param context the Context to use for loading metadata
*/
public static void loadDefaultsFromMetadata(Context context) {
defaultsLoaded = true;
if (context == null) {
return;
}
ApplicationInfo ai = null;
try {
ai = context.getPackageManager().getApplicationInfo(
context.getPackageName(), PackageManager.GET_META_DATA);
} catch (PackageManager.NameNotFoundException e) {
return;
}
if (ai == null || ai.metaData == null) {
return;
}
if (applicationId == null) {
applicationId = ai.metaData.getString(APPLICATION_ID_PROPERTY);
}
if (appClientToken == null) {
appClientToken = ai.metaData.getString(CLIENT_TOKEN_PROPERTY);
}
}
static void loadDefaultsFromMetadataIfNeeded(Context context) {
if (!defaultsLoaded) {
loadDefaultsFromMetadata(context);
}
}
public static String getApplicationSignature(Context context) {
if (context == null) {
return null;
}
PackageManager packageManager = context.getPackageManager();
if (packageManager == null) {
return null;
}
String packageName = context.getPackageName();
PackageInfo pInfo;
try {
pInfo = packageManager.getPackageInfo(packageName, PackageManager.GET_SIGNATURES);
} catch (PackageManager.NameNotFoundException e) {
return null;
}
Signature[] signatures = pInfo.signatures;
if (signatures == null || signatures.length == 0) {
return null;
}
MessageDigest md;
try {
md = MessageDigest.getInstance("SHA-1");
} catch (NoSuchAlgorithmException e) {
return null;
}
md.update(pInfo.signatures[0].toByteArray());
return Base64.encodeToString(md.digest(), Base64.URL_SAFE | Base64.NO_PADDING);
}
/**
* Gets the Facebook application ID for the current app. This will be null unless explicitly set or unless
* loadDefaultsFromMetadata has been called.
* @return the application ID
*/
public static String getApplicationId() {
return applicationId;
}
/**
* Sets the Facebook application ID for the current app.
* @param applicationId the application ID
*/
public static void setApplicationId(String applicationId) {
Settings.applicationId = applicationId;
}
/**
* Gets the client token for the current app. This will be null unless explicitly set or unless
* loadDefaultsFromMetadata has been called.
* @return the client token
*/
public static String getClientToken() {
return appClientToken;
}
/**
* Sets the Facebook client token for the current app.
* @param clientToken the client token
*/
public static void setClientToken(String clientToken) {
appClientToken = clientToken;
}
}
| 9,495 |
775 | <gh_stars>100-1000
// This file is part of MicropolisJ.
// Copyright (C) 2013 <NAME>
// Portions Copyright (C) 1989-2007 Electronic Arts Inc.
//
// MicropolisJ is free software; you can redistribute it and/or modify
// it under the terms of the GNU GPLv3, with additional terms.
// See the README file, included in this distribution, for details.
package micropolisj.engine;
/**
* Lists the disasters that the user can invoke.
*/
public enum Disaster
{
MONSTER,
FIRE,
FLOOD,
MELTDOWN,
TORNADO,
EARTHQUAKE;
}
| 168 |
2,504 | <reponame>dujianxin/Windows-universal-samples<filename>Samples/XamlBind/cpp/PhasingTests.xaml.cpp
//
// PhasingTests.xaml.cpp
// Implementation of the PhasingTests class
//
#include "pch.h"
#include "PhasingTests.xaml.h"
using namespace SDKTemplate;
using namespace concurrency;
using namespace Platform;
using namespace Windows::Foundation;
using namespace Windows::Foundation::Collections;
using namespace Windows::UI::Xaml;
using namespace Windows::UI::Xaml::Controls;
using namespace Windows::UI::Xaml::Controls::Primitives;
using namespace Windows::UI::Xaml::Data;
using namespace Windows::UI::Xaml::Input;
using namespace Windows::UI::Xaml::Media;
using namespace Windows::UI::Xaml::Navigation;
using namespace Windows::Storage::Pickers;
using namespace Windows::Storage;
// The Blank Page item template is documented at http://go.microsoft.com/fwlink/?LinkId=234238
PhasingTests::PhasingTests()
{
InitializeComponent();
CreateTestData();
initialized = true;
}
void PhasingTests::CreateTestData()
{
create_task(dataSource->SetupDataSourceUsingPicturesFolder()).then([this]()
{
this->LoadingPanel->Visibility = Windows::UI::Xaml::Visibility::Collapsed;
this->myGridView->ItemsSource = dataSource;
});
}
void PhasingTests::Reset_Click(Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
dataSource->ResetCollection();
}
void PhasingTests::ChangeFolderClick(Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
FolderPicker^ picker = ref new FolderPicker();
picker->SuggestedStartLocation = PickerLocationId::PicturesLibrary;
picker->FileTypeFilter->Append(".jpg");
picker->FileTypeFilter->Append(".png");
create_task(picker->PickSingleFolderAsync()).then([this](StorageFolder^ f)
{
dataSource = ref new xBindSampleModel::FileDataSource();
this->myGridView->ItemsSource = dataSource;
LoadingPanel->Visibility = Windows::UI::Xaml::Visibility::Visible;
create_task(dataSource->SetupDataSource(f)).then([this]()
{
this->LoadingPanel->Visibility = Windows::UI::Xaml::Visibility::Collapsed;
});
});
}
void PhasingTests::SlowPhasing_UnChecked(Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
SlowPhasing_Checked(sender, e);
}
void PhasingTests::SlowPhasing_Checked(Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
if (initialized)
{
if (SlowPhasing->IsChecked->Value)
{
_CCCToken = myGridView->ContainerContentChanging += ref new TypedEventHandler<ListViewBase^, ContainerContentChangingEventArgs^>(this, &PhasingTests::myGridView_ContainerContentChanging);
}
else
{
myGridView->ContainerContentChanging -= _CCCToken;
}
}
}
void PhasingTests::PhasedTemplate_Checked(Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
if (initialized)
{
if (RadioPhasedTempl->IsChecked->Value == true)
{
myGridView->ItemTemplate = dynamic_cast<DataTemplate^>(Resources->Lookup("PhasedFileTemplate"));
}
else if (RadioxBindTempl->IsChecked->Value)
{
myGridView->ItemTemplate = dynamic_cast<DataTemplate^>(Resources->Lookup("ClassicBindingFileTemplate"));
}
else if (RadioClassicTempl->IsChecked->Value)
{
myGridView->ItemTemplate = dynamic_cast<DataTemplate^>(Resources->Lookup("ClassicBindingFileTemplate"));
}
}
}
void PhasingTests::myGridView_ContainerContentChanging(ListViewBase^ sender, ContainerContentChangingEventArgs^ args)
{
if (args->Phase < 10) args->RegisterUpdateCallback(ref new TypedEventHandler<ListViewBase^, ContainerContentChangingEventArgs^>(this, &PhasingTests::myGridView_ContainerContentChanging));
Sleep(1);
}
void PhasingTests::DataSource_VectorChanged(Windows::Foundation::Collections::IObservableVector<xBindSampleModel::FileItem^>^ sender, Windows::Foundation::Collections::IVectorChangedEventArgs^ e)
{
this->NoItemsPanel->Visibility = (dataSource->Count == 0) ? Windows::UI::Xaml::Visibility::Visible : Windows::UI::Xaml::Visibility::Collapsed;
}
void PhasingTests::Sleep(int timeout)
{
// This function should not be used in real apps; it blocks the UI thread for a timeout. It is used in this sample to illustrate the effects of phasing.
auto eventHandle = ::CreateEventEx(nullptr, nullptr, CREATE_EVENT_MANUAL_RESET, 0);
::WaitForSingleObjectEx(eventHandle, timeout, FALSE);
}
| 1,720 |
545 | <reponame>FWangTrading/Cpp-Primer-5th-Exercises
#include <iterator>
#include <vector>
#include <string>
#include <fstream>
#include <iostream>
int main() {
std::string filename;
std::cin >> filename;
std::ifstream in(filename);
if (!in.is_open()) {
std::cerr << "Can not open file: " << filename << std::endl;
return -1;
}
std::istream_iterator<std::string> iter(in), eof;
//std::vector<std::string> vs;
//while (iter != eof)
// vs.push_back(*iter++);
std::vector<std::string> vs(iter, eof); // The shorter way
for (const auto &s: vs)
std::cout << s << std::endl;
return 0;
}
| 245 |
488 | <filename>src/roseIndependentSupport/visualization/vizgroup.cpp
/******************************************************************************
*
* DOT3D - An OpenGL dot file viewer
* <NAME>, 2003
*
* visualizer FLTK group
*
*****************************************************************************/
#include "vizgroup.h"
// menu callback functions
void menuCallback(Fl_Widget *w, void *v);
// menu contents for each visualizer window
#define VMENU_UNDEFINED 0
#define VMENU_LOAD 1
#define VMENU_CLOSE 2
#define VMENU_DETAIL_HIGH 3
#define VMENU_DETAIL_MED 4
#define VMENU_DETAIL_LOW 5
Fl_Menu_Item vizMenuItems[] = {
{ "&Graph", 0,0,0, FL_SUBMENU },
{ "&Load", 0, (Fl_Callback *)DotVisualizer::menuLoadCB },
{ "&Close", 0, (Fl_Callback *)DotVisualizer::menuCloseCB },
//{ "&Test", 0, (Fl_Callback *)menuCallback , (void *)1 },
{ 0 },
{ "&Layout", 0,0,0, FL_SUBMENU },
{ "&Dot", 0, (Fl_Callback *)DotVisualizer::menuPerformDotLayout },
{ "&Neato", 0, (Fl_Callback *)DotVisualizer::menuPerformNeatoLayout },
//{ "&Test", 0, (Fl_Callback *)menuCallback , (void *)1 },
{ 0 },
{ "&Display", 0,0,0, FL_SUBMENU },
{ "&Lock", 0, (Fl_Callback *)DotVisualizer::menuToggleLock, (void *)0 , FL_MENU_TOGGLE },
{ "&Show FPS", 0, (Fl_Callback *)DotVisualizer::menuToggleFPS , (void *)0 , FL_MENU_TOGGLE | FL_MENU_DIVIDER },
//{ "New File", 0, (Fl_Callback *)menuCallback , (void *)11 , FL_MENU_DIVIDER },
{ "&Detail", 0,0,0, FL_SUBMENU },
{ "&High", 0, (Fl_Callback *)DotVisualizer::menuSetDetailHigh },
{ "&Normal", 0, (Fl_Callback *)DotVisualizer::menuSetDetailMed },
{ "&Low", 0, (Fl_Callback *)DotVisualizer::menuSetDetailLow },
{ 0 },
{ 0 },
{ 0 }
};
void menuCallback(Fl_Widget *w, void *v) {
//cout << " CB " << (int)v << endl;
/* FIXME
int vi = (int)v / 100;
int event = (int)v - vi*100;
if(vi>MAX_VIZ) {
cerr << "Warning - invalid viz id: e"<< event <<" for "<< vi <<", " << (int)v << endl;
return;
}
if(!mVizActive[vi]) {
cerr << "Warning - invalid viz (not active): e"<< event <<" for "<< vi <<", " << (int)v << endl;
return;
}
switch(event) {
case VMENU_LOAD: {
Fl_File_Chooser *choose = new Fl_File_Chooser(".", "DOT File (*.dot)\tDOT File with Layout (*.pdot)", Fl_File_Chooser::SINGLE, "Load DOT Graph");
choose->show();
while(choose->visible()) Fl::wait();
if(!choose->value()) break;
string filename( choose->value() );
viz[vi]->loadFile( filename );
} break;
case VMENU_CLOSE: {
viz[vi]->closeGraph();
} break;
default:
cerr << "Warning - invalid viz menu id: e"<< event <<" for "<< vi <<", " << (int)v << endl;
break;
}
*/
}
//-----------------------------------------------------------------------------
//! constructor
VizGroup::VizGroup(int x,int y,int w,int h) :
Fl_Tile( x,y, w,h )
{
for(int i=0; i<MAX_VIZ; i++) {
mVizActive[i] = false;
mVizVisible[i] = false;
mpViz[i]= NULL;
mpGroup[i]= NULL;
}
end();
}
//-----------------------------------------------------------------------------
//! destructor
VizGroup::~VizGroup()
{
for(int i=0; i<MAX_VIZ; i++) {
if(mVizActive[i]) {
delete mpViz[i];
}
}
};
//-----------------------------------------------------------------------------
void VizGroup::addViz(int x,int y,int w,int h, int vi,Agraph_t *G)
{
Fl_Group *group;
//DotVisualizer *viz = new DotVisualizer( G, 20, 40+100+20, 260, 220 );
DotVisualizer *vizz = NULL;
int menuHeight = 24;
group = new Fl_Group( x,y, w,h );
// add border
Fl_Box *border = new Fl_Box( FL_ENGRAVED_BOX, x,y, w,h, "" );
border->show();
x += 3;
y += 3;
w -= 6;
h -= 6;
if(!mVizActive[vi]) {
vizz = new DotVisualizer( G, x, y +menuHeight, w, h-menuHeight );
vizz->callback( DotVisualizer::staticCallback, vizz );
vizz->setGraphInfo( mpGraphInfo );
//vizz->loadFile( string("./prelayout_small.dot")); // DEBUG std init
mpViz[vi] = vizz;
} else {
vizz = mpViz[vi];
vizz->resize( x,y +menuHeight, w, h-menuHeight );
group->add( vizz );
//group = mpGroup[vi];
//group->resize( x,y, w,h );
}
Fl_Menu_Bar *menu = new Fl_Menu_Bar( x, y, w, menuHeight );
for(size_t l=0; l< sizeof(vizMenuItems)/sizeof(Fl_Menu_Item); l++) {
vizMenuItems[l].user_data_ = (void *)vizz;
}
menu->copy( vizMenuItems );
mpGroup[vi] = group;
group->resizable( vizz );
group->end();
group->show();
menu->show();
mVizActive[vi] = true;
mVizVisible[vi] = true;
//add(group);
}
//-----------------------------------------------------------------------------
//! init the vizgroup layout
void VizGroup::vizLayout(VizGroupLayout slayout)
{
int x=this->x(), y=this->y();
for(int i=0; i<MAX_VIZ; i++) {
if((mVizActive[i])&&(mVizVisible[i])) {
mpGroup[i]->remove( mpViz[i] );
// hide them - this is important, otherwise they will still be drawn somewhere...
mpViz[i]->hide();
}
mVizVisible[i] = false;
mpGroup[i] = NULL;
}
clear();
current(this);
switch(slayout) {
case single:
addViz( x, y, w(), h() ,0,NULL);
break;
case splitHoriz:
addViz( x, y, w(), h()/2 ,0,NULL);
addViz( x, y+h()/2, w(), h()/2 ,1,NULL);
break;
case splitVert:
addViz( x, y, w()/2, h() ,0,NULL);
addViz( x+w()/2, y, w()/2, h() ,1,NULL);
break;
case triple:
addViz( x+w()/3*1, y, w()/3*2, h() ,0,NULL);
addViz( x+w()/3*0, y+h()/2*0, w()/3, h()/2 ,1,NULL);
addViz( x+w()/3*0, y+h()/2*1, w()/3, h()/2 ,2,NULL);
break;
case quad:
addViz( x , y , w()/2, h()/2 ,0,NULL);
addViz( x+w()/2, y , w()/2, h()/2 ,1,NULL);
addViz( x , y+h()/2, w()/2, h()/2 ,2,NULL);
addViz( x+w()/2, y+h()/2, w()/2, h()/2 ,3,NULL);
break;
case threeone:
addViz( x+w()/4*1, y, w()/4*3, h() ,0,NULL);
addViz( x , y+h()/3*0, w()/4, h()/3 ,1,NULL);
addViz( x , y+h()/3*1, w()/4, h()/3 ,2,NULL);
addViz( x , y+h()/3*2, w()/4, h()/3 ,3,NULL);
break;
default:
break;
}
end();
focus( mpViz[0] );
}
//-----------------------------------------------------------------------------
//! redraw all visualizers when idle
void VizGroup::doIdleRedraw()
{
for(int i=0; i<MAX_VIZ; i++) {
if(mVizVisible[i]) {
//cerr << "idle " << i <<endl;
mpViz[i]->idle();
mpViz[i]->redraw();
}
}
}
//-----------------------------------------------------------------------------
//! show all viz's
void VizGroup::showAll()
{
for(int i=0; i<MAX_VIZ; i++) {
if(mVizVisible[i]) {
mpGroup[i]->show();
// for some reason, the visualizers have to be explicitly shown...
mpViz[i]->show();
}
}
show();
}
//-----------------------------------------------------------------------------
//! load multiple files for startup
void VizGroup::vizMultipleLoad( vector<string> files )
{
if(files.size()==0) {
// do default empty layout...
vizLayout( triple );
return;
}
// layout depending on no. of files
switch(files.size()) {
case 1:
vizLayout( single );
break;
case 2:
vizLayout( splitHoriz );
break;
case 3:
vizLayout( triple );
break;
default:
vizLayout( quad );
break;
}
// load them
for(size_t i=0; ((i<files.size())&&(i<4)); i++) {
if(mVizVisible[i]) {
mpViz[i]->loadFile( files[i] );
}
}
}
| 6,217 |
347 | <gh_stars>100-1000
package org.ovirt.engine.ui.uicommonweb.builders.vm;
import org.ovirt.engine.core.common.businessentities.VmBase;
import org.ovirt.engine.ui.uicommonweb.builders.BaseSyncBuilder;
import org.ovirt.engine.ui.uicommonweb.models.vms.UnitVmModel;
public class NameUnitToVmBaseBuilder<T extends VmBase> extends BaseSyncBuilder<UnitVmModel, T> {
@Override
protected void build(UnitVmModel model, VmBase vm) {
vm.setName(model.getName().getEntity());
}
}
| 187 |
3,428 | <gh_stars>1000+
{
"protocol": "https",
"hostname": "api.github.com",
"port": null,
"pathname": "/repos/<slug>/actions/workflows/<id>/dispatches",
"method": "POST",
"ref": "main",
"useragent": "https://github.com/stdlib-js/stdlib/@stdlib/_tools/github/dispatch-workflow",
"accept": "application/vnd.github.v3+json"
}
| 138 |
784 | <gh_stars>100-1000
package com.gplibs.magicsurfaceview;
import android.graphics.Bitmap;
import android.graphics.Rect;
import android.view.View;
public class MagicSurface extends MagicBaseSurface<MagicSurface> {
private SurfaceModel mModel;
private boolean mDrawGrid = false;
private int mRowLineCount = 30;
private int mColLineCount = 30;
private float[] mMatrix = new float[16];
private MagicSurfaceMatrixUpdater mMatrixUpdater;
private MagicSurfaceModelUpdater mModelUpdater;
/**
 * Constructor.
 * @param view the View to animate
*/
public MagicSurface(View view) {
super(view);
}
/**
* Constructor.
* @param body the Bitmap to be animated
* @param rect the Rect relative to the MagicSurfaceView
*/
public MagicSurface(Bitmap body, Rect rect) {
super(body, rect);
}
/**
* Get the rectangular grid model of this MagicSurface.
* @return the model
*/
public SurfaceModel getModel() {
return mModel;
}
/**
* Set the model updater.
* @param updater the model updater
* @return Surface
*/
public MagicSurface setModelUpdater(MagicSurfaceModelUpdater updater) {
this.mModelUpdater = updater;
if (mModelUpdater != null) {
mModelUpdater.mSurface = this;
}
return this;
}
/**
* Set the matrix updater.
* @param updater the matrix updater
* @return Surface
*/
public MagicSurface setMatrixUpdater(MagicSurfaceMatrixUpdater updater) {
this.mMatrixUpdater = updater;
if (mMatrixUpdater != null) {
mMatrixUpdater.mSurface = this;
}
return this;
}
/**
* Set the density of the grid model.
* @param rowLineCount number of rows (default 30)
* @param colLineCount number of columns (default 30)
* @return Surface
*/
public MagicSurface setGrid(int rowLineCount, int colLineCount) {
mRowLineCount = rowLineCount;
mColLineCount = colLineCount;
if (mModel != null) {
mModel.update(colLineCount, rowLineCount, mModel.getWidth(), mModel.getHeight());
}
return this;
}
/**
* Whether to draw only the grid when rendering.
* @param drawGrid whether to draw only the grid (default false)
* @return Surface
*/
public MagicSurface drawGrid(boolean drawGrid) {
mDrawGrid = drawGrid;
if (mModel != null) {
mModel.drawGrid(mDrawGrid);
}
return this;
}
@Override
void setProgram(Program program) {
mModel.setProgram(program);
super.setProgram(program);
}
@Override
protected void updateModel(Vec size, Vec offset) {
mModel = new SurfaceModel(mColLineCount, mRowLineCount, size.width(), size.height());
mModel.drawGrid(mDrawGrid);
mModel.setOffset(offset.x(), offset.y(), offset.z());
}
@Override
void restore() {
if (mModelUpdater != null && mModelUpdater.isStopped()) {
mModelUpdater.start();
}
if (mMatrixUpdater != null && mMatrixUpdater.isStopped()) {
mMatrixUpdater.start();
}
super.restore();
}
@Override
synchronized void release() {
super.release();
mModel = null;
}
@Override
void stop() {
if (mModelUpdater != null) {
mModelUpdater.stop();
}
if (mMatrixUpdater != null) {
mMatrixUpdater.stop();
}
}
@Override
protected boolean runOnDraw(MatrixManager matrixManager) {
if (mModelUpdater != null) {
mModelUpdater.runOnDraw();
}
if (mMatrixUpdater != null) {
mMatrixUpdater.runOnDraw();
}
if (MagicUpdater.prepareUpdater(mModelUpdater) && MagicUpdater.prepareUpdater(mMatrixUpdater)) {
if (mModelUpdater == null && mMatrixUpdater == null) {
MatrixManager.reset(mMatrix);
ReusableVec offset = VecPool.get(3);
mModel.getOffset(offset);
MatrixManager.translateM(mMatrix, offset.x(), offset.y(), offset.z());
offset.free();
matrixManager.setModelMatrix(mMatrix);
} else if (mMatrixUpdater != null) {
try {
mMatrixUpdater.lock();
matrixManager.setModelMatrix(getModel().getMatrix());
} finally {
mMatrixUpdater.unlock();
}
} else {
MatrixManager.reset(mMatrix);
matrixManager.setModelMatrix(mMatrix);
}
mModel.runOnDraw();
return true;
}
return false;
}
@Override
protected void drawModel(MatrixManager matrixManager) {
mModel.draw();
}
@Override
protected void doUpdaterStartedAndStopped() {
MagicUpdater.doStartedAndStopped(mModelUpdater);
MagicUpdater.doStartedAndStopped(mMatrixUpdater);
}
} | 2,475 |
1,219 | <gh_stars>1000+
/*
* Copyright 2013 Bazaarvoice, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bazaarvoice.jolt.chainr;
import com.bazaarvoice.jolt.Chainr;
import com.bazaarvoice.jolt.ContextualTransform;
import com.bazaarvoice.jolt.JoltTransform;
import com.bazaarvoice.jolt.JsonUtils;
import com.bazaarvoice.jolt.Transform;
import com.bazaarvoice.jolt.chainr.transforms.TransformTestResult;
import com.bazaarvoice.jolt.exception.SpecException;
import com.bazaarvoice.jolt.exception.TransformException;
import com.beust.jcommander.internal.Lists;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ChainrInitializationTest {
@DataProvider
public Object[][] badTransforms() {
return new Object[][] {
{JsonUtils.classpathToObject( "/json/chainr/transforms/bad_transform_loadsExplodingTransform.json" )}
};
}
@Test(dataProvider = "badTransforms", expectedExceptions = TransformException.class )
public void testBadTransforms(Object chainrSpec) {
Chainr unit = Chainr.fromSpec( chainrSpec );
unit.transform( new HashMap(), null );// should fail here
Assert.fail( "Should not have gotten here" );
}
@DataProvider
public Object[][] passingTestCases() {
return new Object[][] {
{new Object(), JsonUtils.classpathToObject( "/json/chainr/transforms/loadsGoodTransform.json" )}
};
}
@Test(dataProvider = "passingTestCases" )
public void testPassing(Object input, Object spec) {
Chainr unit = Chainr.fromSpec( spec );
TransformTestResult actual = (TransformTestResult) unit.transform( input, null );
Assert.assertEquals( input, actual.input );
Assert.assertNotNull( actual.spec );
}
@Test( expectedExceptions = IllegalArgumentException.class )
public void chainrBuilderFailsOnNullLoader() {
Object validSpec = JsonUtils.classpathToObject( "/json/chainr/transforms/loadsGoodTransform.json" );
new ChainrBuilder( validSpec ).loader( null );
}
@Test( expectedExceptions = IllegalArgumentException.class )
public void failsOnNullListOfJoltTransforms() {
new Chainr( null );
}
@Test( expectedExceptions = SpecException.class )
public void failsOnStupidTransform() {
List<JoltTransform> badSpec = Lists.newArrayList();
// Stupid JoltTransform that implements the base interface, and not one of the useful ones
badSpec.add( new JoltTransform() {} );
new Chainr( badSpec );
}
@Test( expectedExceptions = SpecException.class )
public void failsOnOverEagerTransform() {
List<JoltTransform> badSpec = Lists.newArrayList();
// Stupid JoltTransform that implements both "real" interfaces
badSpec.add( new OverEagerTransform() );
new Chainr( badSpec );
}
private static class OverEagerTransform implements Transform, ContextualTransform {
@Override
public Object transform( Object input, Map<String, Object> context ) {
return null;
}
@Override
public Object transform( Object input ) {
return null;
}
}
}
| 1,336 |
1,742 | <filename>src/sage/combinat/root_system/fusion_ring.py
"""
Fusion Rings
"""
# ****************************************************************************
# Copyright (C) 2019 <NAME> <bump at match.stanford.edu>
# <NAME> <gh_willieab>
# <NAME> <tcscrims at gmail.com>
# <NAME> <nthiery at users.sf.net>
#
# Distributed under the terms of the GNU General Public License (GPL)
# https://www.gnu.org/licenses/
# ****************************************************************************
from sage.combinat.root_system.weyl_characters import WeylCharacterRing
from sage.combinat.q_analogues import q_int
from sage.matrix.special import diagonal_matrix
from sage.matrix.constructor import matrix
from sage.misc.misc import inject_variable
from sage.rings.integer_ring import ZZ
from sage.rings.number_field.number_field import CyclotomicField
from sage.misc.cachefunc import cached_method
class FusionRing(WeylCharacterRing):
r"""
Return the Fusion Ring (Verlinde Algebra) of level ``k``.
INPUT:
- ``ct`` -- the Cartan type of a simple (finite-dimensional) Lie algebra
- ``k`` -- a nonnegative integer
- ``conjugate`` -- (default ``False``) set ``True`` to obtain
the complex conjugate ring
- ``cyclotomic_order`` -- (default computed depending on ``ct`` and ``k``)
The cyclotomic order is an integer `N` such that all computations
will return elements of the cyclotomic field of `N`-th roots of unity.
Normally you will never need to change this but consider changing it
if :meth:`root_of_unity` ever returns ``None``.
This algebra has a basis (sometimes called *primary fields* but here
called *simple objects*) indexed by the weights of level `\leq k`.
These arise as the fusion algebras of Wess-Zumino-Witten (WZW) conformal
field theories, or as Grothendieck groups of tilting modules for quantum
groups at roots of unity. The :class:`FusionRing` class is implemented as
a variant of the :class:`WeylCharacterRing`.
REFERENCES:
- [BaKi2001]_ Chapter 3
- [DFMS1996]_ Chapter 16
- [EGNO2015]_ Chapter 8
- [Feingold2004]_
- [Fuchs1994]_
- [Row2006]_
- [Walton1990]_
- [Wan2010]_
EXAMPLES::
sage: A22 = FusionRing("A2",2)
sage: [f1, f2] = A22.fundamental_weights()
sage: M = [A22(x) for x in [0*f1, 2*f1, 2*f2, f1+f2, f2, f1]]
sage: [M[3] * x for x in M]
[A22(1,1),
A22(0,1),
A22(1,0),
A22(0,0) + A22(1,1),
A22(0,1) + A22(2,0),
A22(1,0) + A22(0,2)]
You may assign your own labels to the basis elements. In the next
example, we create the `SO(5)` fusion ring of level `2`, check the
weights of the basis elements, then assign new labels to them while
injecting them into the global namespace::
sage: B22 = FusionRing("B2", 2)
sage: b = [B22(x) for x in B22.get_order()]; b
[B22(0,0), B22(1,0), B22(0,1), B22(2,0), B22(1,1), B22(0,2)]
sage: [x.weight() for x in b]
[(0, 0), (1, 0), (1/2, 1/2), (2, 0), (3/2, 1/2), (1, 1)]
sage: B22.fusion_labels(['I0','Y1','X','Z','Xp','Y2'], inject_variables=True)
sage: b = [B22(x) for x in B22.get_order()]; b
[I0, Y1, X, Z, Xp, Y2]
sage: [(x, x.weight()) for x in b]
[(I0, (0, 0)),
(Y1, (1, 0)),
(X, (1/2, 1/2)),
(Z, (2, 0)),
(Xp, (3/2, 1/2)),
(Y2, (1, 1))]
sage: X * Y1
X + Xp
sage: Z * Z
I0
A fixed order of the basis keys is available with :meth:`get_order`.
This is the order used by methods such as :meth:`s_matrix`. You may
use :meth:`CombinatorialFreeModule.set_order` to reorder the basis::
sage: B22.set_order([x.weight() for x in [I0,Y1,Y2,X,Xp,Z]])
sage: [B22(x) for x in B22.get_order()]
[I0, Y1, Y2, X, Xp, Z]
To reset the labels, you may run :meth:`fusion_labels` with no parameter::
sage: B22.fusion_labels()
sage: [B22(x) for x in B22.get_order()]
[B22(0,0), B22(1,0), B22(0,2), B22(0,1), B22(1,1), B22(2,0)]
To reset the order to the default, simply set it to the list of basis
element keys::
sage: B22.set_order(B22.basis().keys().list())
sage: [B22(x) for x in B22.get_order()]
[B22(0,0), B22(1,0), B22(0,1), B22(2,0), B22(1,1), B22(0,2)]
The fusion ring has a number of methods that reflect its role
as the Grothendieck ring of a *modular tensor category* (MTC). These
include twist methods :meth:`Element.twist` and :meth:`Element.ribbon`
for its elements related to the ribbon structure, and the
S-matrix :meth:`s_ij`.
There are two natural normalizations of the S-matrix. Both
are explained in Chapter 3 of [BaKi2001]_. The one that is computed
by the method :meth:`s_matrix`, or whose individual entries
are computed by :meth:`s_ij` is denoted `\tilde{s}` in
[BaKi2001]_. It is not unitary.
The unitary S-matrix is `s=D^{-1/2}\tilde{s}` where
.. MATH::
D = \sum_V d_i(V)^2.
The sum is over all simple objects `V` with
`d_i(V)` the *quantum dimension*. We will call quantity `D`
the *global quantum dimension* and `\sqrt{D}` the
*total quantum order*. They are computed by :meth:`global_q_dimension`
and :meth:`total_q_order`. The unitary S-matrix `s` may be obtained
using :meth:`s_matrix` with the option ``unitary=True``.
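As an illustrative sketch (not a doctest; any fusion ring works, ``A2`` at
level 2 is only an example), the two normalizations differ exactly by the
total quantum order::

    R = FusionRing("A2", 2)
    s_tilde = R.s_matrix()                   # the matrix denoted \tilde{s}
    s = R.s_matrix(unitary=True)             # rescaled by the total quantum order
    assert s == s_tilde / R.total_q_order()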
Let us check the Verlinde formula, which is [DFMS1996]_ (16.3). This
famous identity states that
.. MATH::
N^k_{ij} = \sum_l \frac{s(i,\ell)\,s(j,\ell)\,\overline{s(k,\ell)}}{s(I,\ell)},
where `N^k_{ij}` are the fusion coefficients, i.e. the structure
constants of the fusion ring, and ``I`` is the unit object.
The S-matrix has the property that if `i*` denotes the dual
object of `i`, implemented in Sage as ``i.dual()``, then
.. MATH::
s(i*,j) = s(i,j*) = \overline{s(i,j)}.
This is equation (16.5) in [DFMS1996]_. Thus with `N_{ijk}=N^{k*}_{ij}`
the Verlinde formula is equivalent to
.. MATH::
N_{ijk} = \sum_l \frac{s(i,\ell)\,s(j,\ell)\,s(k,\ell)}{s(I,\ell)},
In this formula `s` is the normalized unitary S-matrix
denoted `s` in [BaKi2001]_. We may define a function that
corresponds to the right-hand side, except using
`\tilde{s}` instead of `s`::
sage: def V(i,j,k):
....: R = i.parent()
....: return sum(R.s_ij(i,l) * R.s_ij(j,l) * R.s_ij(k,l) / R.s_ij(R.one(),l)
....: for l in R.basis())
This does not produce ``self.N_ijk(i,j,k)`` exactly, because of the
missing normalization factor. The following code to check the
Verlinde formula takes this into account::
sage: def test_verlinde(R):
....: b0 = R.one()
....: c = R.global_q_dimension()
....: return all(V(i,j,k) == c * R.N_ijk(i,j,k) for i in R.basis()
....: for j in R.basis() for k in R.basis())
Every fusion ring should pass this test::
sage: test_verlinde(FusionRing("A2",1))
True
sage: test_verlinde(FusionRing("B4",2)) # long time (.56s)
True
As an exercise, the reader may verify the examples in
Section 5.3 of [RoStWa2009]_. Here we check the example
of the Ising modular tensor category, which is related
to the BPZ minimal model `M(4,3)` or to an `E_8` coset
model. See [DFMS1996]_ Sections 7.4.2 and 18.4.1.
[RoStWa2009]_ Example 5.3.4 tells us how to
construct it as the conjugate of the `E_8` level 2
:class:`FusionRing`::
sage: I = FusionRing("E8",2,conjugate=True)
sage: I.fusion_labels(["i0","p","s"],inject_variables=True)
sage: b = I.basis().list(); b
[i0, p, s]
sage: [[x*y for x in b] for y in b]
[[i0, p, s], [p, i0, s], [s, s, i0 + p]]
sage: [x.twist() for x in b]
[0, 1, 1/8]
sage: [x.ribbon() for x in b]
[1, -1, zeta128^8]
sage: [I.r_matrix(i, j, k) for (i,j,k) in [(s,s,i0), (p,p,i0), (p,s,s), (s,p,s), (s,s,p)]]
[-zeta128^56, -1, -zeta128^32, -zeta128^32, zeta128^24]
sage: I.r_matrix(s, s, i0) == I.root_of_unity(-1/8)
True
sage: I.global_q_dimension()
4
sage: I.total_q_order()
2
sage: [x.q_dimension()^2 for x in b]
[1, 1, 2]
sage: I.s_matrix()
[ 1 1 -zeta128^48 + zeta128^16]
[ 1 1 zeta128^48 - zeta128^16]
[-zeta128^48 + zeta128^16 zeta128^48 - zeta128^16 0]
sage: I.s_matrix().apply_map(lambda x:x^2)
[1 1 2]
[1 1 2]
[2 2 0]
The term *modular tensor category* refers to the fact that associated
with the category there is a projective representation of the modular
group `SL(2,\ZZ)`. We recall that this group is generated by
.. MATH::
S = \begin{pmatrix} & -1\\1\end{pmatrix},\qquad
T = \begin{pmatrix} 1 & 1\\ &1 \end{pmatrix}
subject to the relations `(ST)^3 = S^2`, `S^2T = TS^2`, and `S^4 = I`.
Let `s` be the unitary (normalized) S-matrix, `t` the diagonal matrix
whose entries are the twists of the simple objects, and `C` the
conjugation matrix :meth:`conj_matrix`. Let
.. MATH::
D_+ = \sum_i d_i^2 \theta_i, \qquad D_- = \sum_i d_i^2 \theta_i^{-1},
where `d_i` and `\theta_i` are the quantum dimensions and twists of the
simple objects. Let `c` be the Virasoro central charge, a rational number
that is computed in :meth:`virasoro_central_charge`. It is known that
.. MATH::
\sqrt{\frac{D_+}{D_-}} = e^{i\pi c/4}.
It is proved in [BaKi2001]_ Equation (3.1.17) that
.. MATH::
(st)^3 = e^{i\pi c/4} s^2, \qquad
s^2 = C, \qquad C^2 = 1, \qquad Ct = tC.
Therefore `S \mapsto s, T \mapsto t` is a projective representation
of `SL(2, \ZZ)`. Let us confirm these identities for the Fibonacci MTC
``FusionRing("G2", 1)``::
sage: R = FusionRing("G2",1)
sage: S = R.s_matrix(unitary=True)
sage: T = R.twists_matrix()
sage: C = R.conj_matrix()
sage: c = R.virasoro_central_charge(); c
14/5
sage: (S*T)^3 == R.root_of_unity(c/4) * S^2
True
sage: S^2 == C
True
sage: C*T == T*C
True
"""
@staticmethod
def __classcall__(cls, ct, k, base_ring=ZZ, prefix=None, style="coroots", conjugate=False, cyclotomic_order=None):
"""
Normalize input to ensure a unique representation.
TESTS::
sage: F1 = FusionRing('B3', 2)
sage: F2 = FusionRing(CartanType('B3'), QQ(2), ZZ)
sage: F3 = FusionRing(CartanType('B3'), int(2), style="coroots")
sage: F1 is F2 and F2 is F3
True
sage: A23 = FusionRing('A2', 3)
sage: TestSuite(A23).run()
sage: B22 = FusionRing('B2', 2)
sage: TestSuite(B22).run()
sage: C31 = FusionRing('C3', 1)
sage: TestSuite(C31).run()
sage: D41 = FusionRing('D4', 1)
sage: TestSuite(D41).run()
sage: G22 = FusionRing('G2', 2)
sage: TestSuite(G22).run()
sage: F41 = FusionRing('F4', 1)
sage: TestSuite(F41).run()
sage: E61 = FusionRing('E6', 1)
sage: TestSuite(E61).run()
sage: E71 = FusionRing('E7', 1)
sage: TestSuite(E71).run()
sage: E81 = FusionRing('E8', 1)
sage: TestSuite(E81).run()
"""
return super(FusionRing, cls).__classcall__(cls, ct, base_ring=base_ring,
prefix=prefix, style=style, k=k,
conjugate=conjugate,
cyclotomic_order=cyclotomic_order)
def _test_verlinde(self, **options):
"""
Check the Verlinde formula for this :class:`FusionRing` instance.
EXAMPLES::
sage: G22 = FusionRing("G2",2)
sage: G22._test_verlinde()
"""
tester = self._tester(**options)
c = self.global_q_dimension()
i0 = self.one()
from sage.misc.misc import some_tuples
B = self.basis()
for x,y,z in some_tuples(B, 3, tester._max_runs):
v = sum(self.s_ij(x,w) * self.s_ij(y,w) * self.s_ij(z,w) / self.s_ij(i0,w) for w in B)
tester.assertEqual(v, c * self.N_ijk(x,y,z))
def _test_total_q_order(self, **options):
r"""
Check that the total quantum order is real and positive.
The total quantum order is the positive square root
of the global quantum dimension. This indirectly tests the
Virasoro central charge.
EXAMPLES::
sage: G22 = FusionRing("G2",2)
sage: G22._test_total_q_order()
"""
tester = self._tester(**options)
tqo = self.total_q_order()
tester.assertTrue(tqo.is_real_positive())
tester.assertEqual(tqo**2, self.global_q_dimension())
def fusion_labels(self, labels=None, inject_variables=False):
r"""
Set the labels of the basis.
INPUT:
- ``labels`` -- (default: ``None``) a list of strings or string
- ``inject_variables`` -- (default: ``False``) if ``True``, then
inject the variable names into the global namespace; note that
this could override objects already defined
If ``labels`` is a list, the length of the list must equal the
number of basis elements. These become the names of
the basis elements.
If ``labels`` is a string, this is treated as a prefix and a
list of names is generated.
If ``labels`` is ``None``, then this resets the labels to the default.
EXAMPLES::
sage: A13 = FusionRing("A1", 3)
sage: A13.fusion_labels("x")
sage: fb = list(A13.basis()); fb
[x0, x1, x2, x3]
sage: Matrix([[x*y for y in A13.basis()] for x in A13.basis()])
[ x0 x1 x2 x3]
[ x1 x0 + x2 x1 + x3 x2]
[ x2 x1 + x3 x0 + x2 x1]
[ x3 x2 x1 x0]
We give an example where the variables are injected into the
global namespace::
sage: A13.fusion_labels("y", inject_variables=True)
sage: y0
y0
sage: y0.parent() is A13
True
We reset the labels to the default::
sage: A13.fusion_labels()
sage: fb
[A13(0), A13(1), A13(2), A13(3)]
sage: y0
A13(0)
"""
if labels is None:
# Remove the fusion labels
self._fusion_labels = None
return
B = self.basis()
if isinstance(labels, str):
labels = [labels + str(k) for k in range(len(B))]
elif len(labels) != len(B):
raise ValueError('invalid data')
d = {}
ac = self.simple_coroots()
for j, b in enumerate(self.get_order()):
t = tuple([b.inner_product(x) for x in ac])
d[t] = labels[j]
if inject_variables:
inject_variable(labels[j], B[b])
self._fusion_labels = d
@cached_method
def field(self):
r"""
Return a cyclotomic field large enough to
contain the `2 \ell`-th roots of unity, as well as
all the S-matrix entries.
EXAMPLES::
sage: FusionRing("A2",2).field()
Cyclotomic Field of order 60 and degree 16
sage: FusionRing("B2",2).field()
Cyclotomic Field of order 40 and degree 16
"""
return CyclotomicField(4 * self._cyclotomic_order)
def root_of_unity(self, r):
r"""
Return `e^{i\pi r}` as an element of ``self.field()`` if possible.
INPUT:
- ``r`` -- a rational number
EXAMPLES::
sage: A11 = FusionRing("A1",1)
sage: A11.field()
Cyclotomic Field of order 24 and degree 8
sage: [A11.root_of_unity(2/x) for x in [1..7]]
[1, -1, zeta24^4 - 1, zeta24^6, None, zeta24^4, None]
"""
n = 2 * r * self._cyclotomic_order
if n in ZZ:
return self.field().gen() ** n
else:
return None
def get_order(self):
r"""
Return the weights of the basis vectors in a fixed order.
You may change the order of the basis using :meth:`CombinatorialFreeModule.set_order`
EXAMPLES::
sage: A14 = FusionRing("A1",4)
sage: w = A14.get_order(); w
[(0, 0), (1/2, -1/2), (1, -1), (3/2, -3/2), (2, -2)]
sage: A14.set_order([w[k] for k in [0,4,1,3,2]])
sage: [A14(x) for x in A14.get_order()]
[A14(0), A14(4), A14(1), A14(3), A14(2)]
.. WARNING::
This duplicates :meth:`get_order` from
:class:`CombinatorialFreeModule` except the result
is *not* cached. Caching of
:meth:`CombinatorialFreeModule.get_order` causes inconsistent
results after calling :meth:`CombinatorialFreeModule.set_order`.
"""
if self._order is None:
self.set_order(self.basis().keys().list())
return self._order
def some_elements(self):
"""
Return some elements of ``self``.
EXAMPLES::
sage: D41 = FusionRing('D4', 1)
sage: D41.some_elements()
[D41(1,0,0,0), D41(0,0,1,0), D41(0,0,0,1)]
"""
return [self.monomial(x) for x in self.fundamental_weights()
if self.level(x) <= self._k]
def fusion_level(self):
r"""
Return the level `k` of ``self``.
EXAMPLES::
sage: B22 = FusionRing('B2',2)
sage: B22.fusion_level()
2
"""
return self._k
def fusion_l(self):
r"""
Return the product `\ell = m_g(k + h^\vee)`, where `m_g` denotes the
square of the ratio of the lengths of long to short roots of
the underlying Lie algebra, `k` denotes the level of the FusionRing,
and `h^\vee` denotes the dual Coxeter number of the underlying Lie
algebra.
This value is used to define the associated `2\ell`-th root
of unity `q = e^{i\pi/\ell}`.
EXAMPLES::
sage: B22 = FusionRing('B2',2)
sage: B22.fusion_l()
10
sage: D52 = FusionRing('D5',2)
sage: D52.fusion_l()
10
"""
return self._l
def virasoro_central_charge(self):
r"""
Return the Virasoro central charge of the WZW conformal
field theory associated with the Fusion Ring.
If `\mathfrak{g}` is the corresponding semisimple Lie algebra, this is
.. MATH::
\frac{k\dim\mathfrak{g}}{k+h^\vee},
where `k` is the level and `h^\vee` is the dual Coxeter number.
See [DFMS1996]_ Equation (15.61).
Let `d_i` and `\theta_i` be the quantum dimensions and
twists of the simple objects. By Proposition 2.3 in [RoStWa2009]_,
there exists a rational number `c` such that
`D_+ / \sqrt{D} = e^{i\pi c/4}`, where `D_+ = \sum d_i^2 \theta_i`
is computed in :meth:`D_plus` and `D = \sum d_i^2 > 0` is computed
by :meth:`global_q_dimension`. Squaring this identity and
remembering that `D_+ D_- = D` gives
.. MATH::
D_+ / D_- = e^{i\pi c/2}.
EXAMPLES::
sage: R = FusionRing("A1", 2)
sage: c = R.virasoro_central_charge(); c
3/2
sage: Dp = R.D_plus(); Dp
2*zeta32^6
sage: Dm = R.D_minus(); Dm
-2*zeta32^10
sage: Dp / Dm == R.root_of_unity(c/2)
True
"""
dim_g = len(self.space().roots()) + self.cartan_type().rank()
return self._conj * self._k * dim_g / (self._k + self._h_check)
def conj_matrix(self):
r"""
Return the conjugation matrix, which is the permutation matrix
for the conjugation (dual) operation on basis elements.
EXAMPLES::
sage: FusionRing("A2",1).conj_matrix()
[1 0 0]
[0 0 1]
[0 1 0]
"""
b = self.basis().list()
return matrix(ZZ, [[i == j.dual() for i in b] for j in b])
def twists_matrix(self):
r"""
Return a diagonal matrix describing the twist corresponding to
each simple object in the ``FusionRing``.
EXAMPLES::
sage: B21=FusionRing("B2",1)
sage: [x.twist() for x in B21.basis().list()]
[0, 1, 5/8]
sage: [B21.root_of_unity(x.twist()) for x in B21.basis().list()]
[1, -1, zeta32^10]
sage: B21.twists_matrix()
[ 1 0 0]
[ 0 -1 0]
[ 0 0 zeta32^10]
"""
B = self.basis()
return diagonal_matrix(B[x].ribbon() for x in self.get_order())
@cached_method
def N_ijk(self, elt_i, elt_j, elt_k):
r"""
Return the symmetric fusion coefficient `N_{ijk}`.
INPUT:
- ``elt_i``, ``elt_j``, ``elt_k`` -- elements of the fusion basis
This is the same as `N_{ij}^{k\ast}`, where `N_{ij}^k` are
the structure coefficients of the ring (see :meth:`Nk_ij`),
and `k\ast` denotes the dual element. The coefficient `N_{ijk}`
is unchanged under permutations of the three basis vectors.
EXAMPLES::
sage: G23 = FusionRing("G2", 3)
sage: G23.fusion_labels("g")
sage: b = G23.basis().list(); b
[g0, g1, g2, g3, g4, g5]
sage: [(x,y,z) for x in b for y in b for z in b if G23.N_ijk(x,y,z) > 1]
[(g3, g3, g3), (g3, g3, g4), (g3, g4, g3), (g4, g3, g3)]
sage: all(G23.N_ijk(x,y,z)==G23.N_ijk(y,z,x) for x in b for y in b for z in b)
True
sage: all(G23.N_ijk(x,y,z)==G23.N_ijk(y,x,z) for x in b for y in b for z in b)
True
"""
return (elt_i * elt_j).monomial_coefficients().get(elt_k.dual().weight(), 0)
@cached_method
def Nk_ij(self, elt_i, elt_j, elt_k):
r"""
Return the fusion coefficient `N^k_{ij}`.
These are the structure coefficients of the fusion ring, so
.. MATH::
i * j = \sum_{k} N_{ij}^k k.
EXAMPLES::
sage: A22 = FusionRing("A2", 2)
sage: b = A22.basis().list()
sage: all(x*y == sum(A22.Nk_ij(x,y,k)*k for k in b) for x in b for y in b)
True
"""
return (elt_i * elt_j).monomial_coefficients(copy=False).get(elt_k.weight(), 0)
@cached_method
def s_ij(self, elt_i, elt_j):
r"""
Return the element of the S-matrix of this fusion ring corresponding to
the given elements.
This is computed using the formula
.. MATH::
s_{i,j} = \frac{1}{\theta_i\theta_j} \sum_k N_{ik}^j d_k \theta_k,
where `\theta_k` is the twist and `d_k` is the quantum
dimension. See [Row2006]_ Equation (2.2) or [EGNO2015]_
Proposition 8.13.8.
INPUT:
- ``elt_i``, ``elt_j`` -- elements of the fusion basis
EXAMPLES::
sage: G21 = FusionRing("G2", 1)
sage: b = G21.basis()
sage: [G21.s_ij(x, y) for x in b for y in b]
[1, -zeta60^14 + zeta60^6 + zeta60^4, -zeta60^14 + zeta60^6 + zeta60^4, -1]
"""
ijtwist = elt_i.twist() + elt_j.twist()
return sum(k.q_dimension() * self.Nk_ij(elt_i, k, elt_j)
* self.root_of_unity(k.twist() - ijtwist)
for k in self.basis())
def s_matrix(self, unitary=False):
r"""
Return the S-matrix of this fusion ring.
OPTIONAL:
- ``unitary`` -- (default: ``False``) set to ``True`` to obtain
the unitary S-matrix
Without the ``unitary`` parameter, this is the matrix denoted
`\widetilde{s}` in [BaKi2001]_.
EXAMPLES::
sage: D91 = FusionRing("D9", 1)
sage: D91.s_matrix()
[ 1 1 1 1]
[ 1 1 -1 -1]
[ 1 -1 -zeta136^34 zeta136^34]
[ 1 -1 zeta136^34 -zeta136^34]
sage: S = D91.s_matrix(unitary=True); S
[ 1/2 1/2 1/2 1/2]
[ 1/2 1/2 -1/2 -1/2]
[ 1/2 -1/2 -1/2*zeta136^34 1/2*zeta136^34]
[ 1/2 -1/2 1/2*zeta136^34 -1/2*zeta136^34]
sage: S*S.conjugate()
[1 0 0 0]
[0 1 0 0]
[0 0 1 0]
[0 0 0 1]
"""
b = self.basis()
S = matrix([[self.s_ij(b[x], b[y]) for x in self.get_order()] for y in self.get_order()])
if unitary:
return S / self.total_q_order()
else:
return S
@cached_method
def r_matrix(self, i, j, k):
r"""
Return the R-matrix entry corresponding to the subobject ``k``
in the tensor product of ``i`` with ``j``.
.. WARNING::
This method only gives complete information when `N_{ij}^k = 1`
(an important special case). Tables of MTC including R-matrices
may be found in Section 5.3 of [RoStWa2009]_ and in [Bond2007]_.
The R-matrix is a homomorphism `i \otimes j \rightarrow j \otimes i`.
This may be hard to describe since the object `i \otimes j`
may be reducible. However if `k` is a simple subobject of
`i \otimes j` it is also a subobject of `j \otimes i`. If we fix
embeddings `k \rightarrow i \otimes j`, `k \rightarrow j \otimes i`
we may ask for the scalar automorphism of `k` induced by the
R-matrix. This method computes that scalar. It is possible to
adjust the set of embeddings `k \rightarrow i \otimes j` (called
a *gauge*) so that this scalar equals
.. MATH::
\pm \sqrt{\frac{ \theta_k }{ \theta_i \theta_j }}.
If `i \neq j`, the gauge may be used to control the sign of
the square root. But if `i = j` then we must be careful
about the sign. These cases are computed by a formula
of [BDGRTW2019]_, Proposition 2.3.
EXAMPLES::
sage: I = FusionRing("E8", 2, conjugate=True) # Ising MTC
sage: I.fusion_labels(["i0","p","s"], inject_variables=True)
sage: I.r_matrix(s,s,i0) == I.root_of_unity(-1/8)
True
sage: I.r_matrix(p,p,i0)
-1
sage: I.r_matrix(p,s,s) == I.root_of_unity(-1/2)
True
sage: I.r_matrix(s,p,s) == I.root_of_unity(-1/2)
True
sage: I.r_matrix(s,s,p) == I.root_of_unity(3/8)
True
"""
if self.Nk_ij(i, j, k) == 0:
return 0
if i != j:
return self.root_of_unity((k.twist(reduced=False) - i.twist(reduced=False) - j.twist(reduced=False)) / 2)
i0 = self.one()
B = self.basis()
return sum(y.ribbon()**2 / (i.ribbon() * x.ribbon()**2)
* self.s_ij(i0,y) * self.s_ij(i,z) * self.s_ij(x,z).conjugate()
* self.s_ij(k,x).conjugate() * self.s_ij(y,z).conjugate() / self.s_ij(i0,z)
for x in B for y in B for z in B) / (self.total_q_order()**4)
def global_q_dimension(self):
r"""
Return `\sum d_i^2`, where the sum is over all simple objects
and `d_i` is the quantum dimension. It is a positive real number.
EXAMPLES::
sage: FusionRing("E6",1).global_q_dimension()
3
"""
return sum(x.q_dimension()**2 for x in self.basis())
def total_q_order(self):
r"""
Return the positive square root of ``self.global_q_dimension()``
as an element of ``self.field()``.
EXAMPLES::
sage: F = FusionRing("G2",1)
sage: tqo=F.total_q_order(); tqo
zeta60^15 - zeta60^11 - zeta60^9 + 2*zeta60^3 + zeta60
sage: tqo.is_real_positive()
True
sage: tqo^2 == F.global_q_dimension()
True
"""
c = self.virasoro_central_charge()
return self.D_plus() * self.root_of_unity(-c/4)
def D_plus(self):
r"""
Return `\sum d_i^2\theta_i` where `i` runs through the simple objects,
`d_i` is the quantum dimension and `\theta_i` is the twist.
This is denoted `p_+` in [BaKi2001]_ Chapter 3.
EXAMPLES::
sage: B31 = FusionRing("B3",1)
sage: Dp = B31.D_plus(); Dp
2*zeta48^13 - 2*zeta48^5
sage: Dm = B31.D_minus(); Dm
-2*zeta48^3
sage: Dp*Dm == B31.global_q_dimension()
True
sage: c = B31.virasoro_central_charge(); c
7/2
sage: Dp/Dm == B31.root_of_unity(c/2)
True
"""
return sum((x.q_dimension())**2 * x.ribbon() for x in self.basis())
def D_minus(self):
r"""
Return `\sum d_i^2\theta_i^{-1}` where `i` runs through the simple
objects, `d_i` is the quantum dimension and `\theta_i` is the twist.
This is denoted `p_-` in [BaKi2001]_ Chapter 3.
EXAMPLES::
sage: E83 = FusionRing("E8",3,conjugate=True)
sage: [Dp,Dm] = [E83.D_plus(), E83.D_minus()]
sage: Dp*Dm == E83.global_q_dimension()
True
sage: c = E83.virasoro_central_charge(); c
-248/11
sage: Dp*Dm == E83.global_q_dimension()
True
"""
return sum((x.q_dimension())**2 / x.ribbon() for x in self.basis())
class Element(WeylCharacterRing.Element):
"""
A class for FusionRing elements.
"""
def is_simple_object(self):
r"""
Determine whether ``self`` is a simple object of the fusion ring.
EXAMPLES::
sage: A22 = FusionRing("A2", 2)
sage: x = A22(1,0); x
A22(1,0)
sage: x.is_simple_object()
True
sage: x^2
A22(0,1) + A22(2,0)
sage: (x^2).is_simple_object()
False
"""
return self.parent()._k is not None and len(self._monomial_coefficients) == 1
def weight(self):
r"""
Return the parametrizing dominant weight in the level `k` alcove.
This method is only available for basis elements.
EXAMPLES::
sage: A21 = FusionRing("A2",1)
sage: [x.weight() for x in A21.basis().list()]
[(0, 0, 0), (2/3, -1/3, -1/3), (1/3, 1/3, -2/3)]
"""
if len(self._monomial_coefficients) != 1:
raise ValueError("fusion weight is valid for basis elements only")
return next(iter(self._monomial_coefficients))
def twist(self, reduced=True):
r"""
Return a rational number `h` such that `\theta = e^{i \pi h}`
is the twist of ``self``. The quantity `e^{i \pi h}` is
also available using :meth:`ribbon`.
This method is only available for simple objects. If
`\lambda` is the weight of the object, then
`h = \langle \lambda, \lambda+2\rho \rangle`, where
`\rho` is half the sum of the positive roots.
As in [Row2006]_, this requires normalizing
the invariant bilinear form so that
`\langle \alpha, \alpha \rangle = 2` for short roots.
INPUT:
- ``reduced`` -- (default: ``True``) boolean; if ``True``
then return the twist reduced modulo 2
EXAMPLES::
sage: G21 = FusionRing("G2", 1)
sage: [x.twist() for x in G21.basis()]
[0, 4/5]
sage: [G21.root_of_unity(x.twist()) for x in G21.basis()]
[1, zeta60^14 - zeta60^4]
sage: zeta60 = G21.field().gen()
sage: zeta60^((4/5)*(60/2))
zeta60^14 - zeta60^4
sage: F42 = FusionRing("F4", 2)
sage: [x.twist() for x in F42.basis()]
[0, 18/11, 2/11, 12/11, 4/11]
sage: E62 = FusionRing("E6", 2)
sage: [x.twist() for x in E62.basis()]
[0, 26/21, 12/7, 8/21, 8/21, 26/21, 2/3, 4/7, 2/3]
"""
if not self.is_simple_object():
raise ValueError("quantum twist is only available for simple objects of a FusionRing")
P = self.parent()
rho = P.space().rho()
# We copy self.weight() to skip the test (which was already done
# by self.is_simple_object()).
lam = next(iter(self._monomial_coefficients))
inner = lam.inner_product(lam + 2*rho)
twist = P._conj * P._nf * inner / P.fusion_l()
# Reduce modulo 2
if reduced:
f = twist.floor()
twist -= f
return twist + (f % 2)
else:
return twist
def ribbon(self):
r"""
Return the twist or ribbon element of ``self``.
If `h` is the rational number modulo 2 produced by
``self.twist()``, this method produces `e^{i\pi h}`.
.. SEEALSO::
An additive version of this is available as :meth:`twist`.
EXAMPLES::
sage: F = FusionRing("A1",3)
sage: [x.twist() for x in F.basis()]
[0, 3/10, 4/5, 3/2]
sage: [x.ribbon() for x in F.basis()]
[1, zeta40^6, zeta40^12 - zeta40^8 + zeta40^4 - 1, -zeta40^10]
sage: [F.root_of_unity(x) for x in [0, 3/10, 4/5, 3/2]]
[1, zeta40^6, zeta40^12 - zeta40^8 + zeta40^4 - 1, -zeta40^10]
"""
return self.parent().root_of_unity(self.twist())
@cached_method
def q_dimension(self):
r"""
Return the quantum dimension as an element of the cyclotomic
field of the `2\ell`-th roots of unity, where `l = m (k+h^\vee)`
with `m=1,2,3` depending on whether type is simply, doubly or
triply laced, `k` is the level and `h^\vee` is the dual
Coxeter number.
EXAMPLES::
sage: B22 = FusionRing("B2",2)
sage: [(b.q_dimension())^2 for b in B22.basis()]
[1, 4, 5, 1, 5, 4]
"""
if not self.is_simple_object():
raise ValueError("quantum dimension is only available for simple objects of a FusionRing")
P = self.parent()
lam = self.weight()
space = P.space()
rho = space.rho()
powers = {}
for alpha in space.positive_roots():
val = alpha.inner_product(lam + rho)
if val in powers:
powers[val] += 1
else:
powers[val] = 1
val = alpha.inner_product(rho)
if val in powers:
powers[val] -= 1
else:
powers[val] = -1
R = ZZ['q']
q = R.gen()
expr = R.fraction_field().one()
for val in powers:
exp = powers[val]
if exp > 0:
expr *= q_int(P._nf * val, q)**exp
elif exp < 0:
expr /= q_int(P._nf * val, q)**(-exp)
expr = R(expr)
expr = expr.substitute(q=q**4) / (q**(2*expr.degree()))
zet = P.field().gen() ** (P._cyclotomic_order/P._l)
return expr.substitute(q=zet)
| 19,135 |
3,363 | <filename>lxd/include/config.h<gh_stars>1000+
#ifndef CONFIG_H
#define CONFIG_H
#ifndef _GNU_SOURCE
#define _GNU_SOURCE 1
#endif
#endif /* CONFIG_H */
| 65 |
369 | <reponame>bitigchi/MuditaOS
# Copyright (c) 2017-2021, Mudit<NAME>.o.o. All rights reserved.
# For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md
import argparse
import os
parser = argparse.ArgumentParser(
usage=
"\ne.g.:\n"
"For simulator: python3 tools/mount_user_lfs_partition.py --mount_dir /mnt/user --lfsfuse build-linux-Debug/lfsfuse --image build-linux-Debug/PurePhone.img --loop 1\n"
"For device (MSC mode): python3 tools/mount_user_lfs_partition.py --mount_dir /mnt/user --lfsfuse build-rt1051-RelWithDebInfo/lfsfuse --part3_path /dev/sda3")
parser.add_argument('--mount_dir', type=str,
help="Directory where LFS should be mounted", required=True)
parser.add_argument('--lfsfuse', type=str,
help="Path to `lfsfuse`", required=True)
parser.add_argument('--part3_path', type=str, help="Path to third partition (when mounting PURE LFS) e.g. `/dev/sda3`")
parser.add_argument('--image', type=str, help="Path to PurePhone.img (when mounting LINUX IMAGE LFS)")
parser.add_argument('--loop', type=int,
help="Loop device number - different than already existed, see `losetup --list` (when mounting LINUX IMAGE LFS)")
args = parser.parse_args()
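# Two modes are supported (see the usage string above): attach a Linux image
# to a loop device via --image/--loop, or use a device partition given
# directly by --part3_path.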
if args.loop and args.image:
command = "sudo losetup -P /dev/loop" + str(args.loop) + " " + str(args.image)
print("Executing command:", command)
result = os.system(command)
if result != 0:
os._exit(result)
command = "sudo chmod 666 /dev/loop" + str(args.loop)
print("Executing command:", command)
result = os.system(command)
if result != 0:
os._exit(result)
elif not args.part3_path:
print("ERROR! part3_path arg not provided!")
os._exit(-1)
if not os.path.exists(args.mount_dir):
print("Mount dir not exist! Trying to create it...")
command = "sudo mkdir " + args.mount_dir
print("Executing command:", command)
result = os.system(command)
if result != 0:
os._exit(result)
if not os.path.exists(args.mount_dir):
print("Failed to create mount dir!")
os._exit(-1)
else:
print("Mount dir created!")
command = "sudo " + args.lfsfuse + " "
if args.loop:
command += "/dev/loop" + str(args.loop) + "p3 "
else:
command += args.part3_path + " "
command += args.mount_dir + " --block_size=32768"
print("Executing command:", command)
result = os.system(command)
if result != 0:
os._exit(result)
print("Procedure completed. Please check " + args.mount_dir)
| 992 |
607 | <gh_stars>100-1000
package dev.cheerfun.pixivic.biz.web.user.dto;
import lombok.Data;
/**
* @author OysterQAQ
* @version 1.0
* @date 2021/1/26 3:16 PM
* @description VerifiedDTO
*/
@Data
public class VerifiedDTO {
private String name;
private String exchangeCode;
private String idCard;
}
| 120 |
5,547 | <gh_stars>1000+
//////////////////////////////////////////////////////////////////////////////////
//
//
// Stores the image data; the image size is 64*32 pixels
//
/////////////////////////////////////////////////////////////////////////////////
#ifndef __BMP_H
#define __BMP_H
unsigned char BMP1[] =
{0x00,0x06,0x0A,0xFE,0x0A,0xC6,0x00,0xE0,0x00,0xF0,0x00,0xF8,0x00,0x00,0x00,0x00,
0x00,0x00,0xFE,0x7D,0xBB,0xC7,0xEF,0xEF,0xEF,0xEF,0xEF,0xEF,0xEF,0xC7,0xBB,0x7D,
0xFE,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x08,
0x0C,0xFE,0xFE,0x0C,0x08,0x20,0x60,0xFE,0xFE,0x60,0x20,0x00,0x00,0x00,0x78,0x48,
0xFE,0x82,0xBA,0xBA,0x82,0xBA,0xBA,0x82,0xBA,0xBA,0x82,0xBA,0xBA,0x82,0xFE,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,0x01,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xFE,0xFF,
0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0xFF,0xFF,0x00,0x00,0xFE,0xFF,0x03,
0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0xFF,0xFE,0x00,0x00,0x00,0x00,0xC0,0xC0,
0xC0,0x00,0x00,0x00,0x00,0xFE,0xFF,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,
0xFF,0xFE,0x00,0x00,0xFE,0xFF,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0x03,0xFF,
0xFE,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xFF,0xFF,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xFF,0xFF,0x00,0x00,0xFF,0xFF,0x0C,
0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0xFF,0xFF,0x00,0x00,0x00,0x00,0xE1,0xE1,
0xE1,0x00,0x00,0x00,0x00,0xFF,0xFF,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0xFF,0xFF,0x00,0x00,0xFF,0xFF,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0x0C,0xFF,
0xFF,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0F,0x1F,
0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x1F,0x0F,0x00,0x00,0x0F,0x1F,0x18,
0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x1F,0x0F,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x0F,0x1F,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,
0x1F,0x0F,0x00,0x00,0x0F,0x1F,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x1F,
0x0F,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x8C,0x42,0x22,0x12,0x0C,0x00,0xFC,0x02,0x02,0x02,0xFC,0x00,0x00,0x04,
0xFE,0x00,0x00,0x00,0x20,0x58,0x44,0xFE,0x40,0x00,0x10,0x10,0x10,0x10,0x10,0x00,
0x00,0x04,0xFE,0x00,0x00,0x00,0xFC,0x02,0x02,0x02,0xFC,0x00,0x10,0x10,0x10,0x10,
0x10,0x00,0xFC,0x02,0x02,0x02,0xFC,0x00,0x00,0x04,0xFE,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x24,0xA4,0x2E,0x24,0xE4,0x24,0x2E,0xA4,0x24,0x00,0x00,0x00,0xF8,0x4A,0x4C,
0x48,0xF8,0x48,0x4C,0x4A,0xF8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x01,0x01,0x01,0x01,0x01,0x00,0x00,0x01,0x01,0x01,0x00,0x00,0x00,0x01,
0x01,0x01,0x00,0x00,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x01,0x01,0x01,0x00,0x00,0x00,0x01,0x01,0x01,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x01,0x01,0x01,0x00,0x00,0x00,0x01,0x01,0x01,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xC0,0x20,0x10,0x10,
0x10,0x10,0x20,0xC0,0x00,0x00,0xC0,0x20,0x10,0x10,0x10,0x10,0x20,0xC0,0x00,0x00,
0x00,0x12,0x0A,0x07,0x02,0x7F,0x02,0x07,0x0A,0x12,0x00,0x00,0x00,0x0B,0x0A,0x0A,
0x0A,0x7F,0x0A,0x0A,0x0A,0x0B,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1F,0x20,0x40,0x40,
0x40,0x50,0x20,0x5F,0x80,0x00,0x1F,0x20,0x40,0x40,0x40,0x50,0x20,0x5F,0x80,0x00,
};
#endif
| 4,342 |
2,258 | import tempfile
from datetime import datetime, timedelta
from pathlib import Path
from textwrap import dedent
from tests.utils.cli_utils import CliRunner
def test_repo_init() -> None:
"""
This test simply makes sure that you can run `feast apply && feast materialize` on
the repo created by "feast init" without errors.
"""
runner = CliRunner()
with tempfile.TemporaryDirectory() as temp_dir:
temp_path = Path(temp_dir)
result = runner.run(["init", "my_project"], cwd=temp_path)
repo_path = temp_path / "my_project"
assert result.returncode == 0
result = runner.run(["apply"], cwd=repo_path)
assert result.returncode == 0
end_date = datetime.utcnow()
start_date = end_date - timedelta(days=100)
result = runner.run(
["materialize", start_date.isoformat(), end_date.isoformat()], cwd=repo_path
)
assert result.returncode == 0
def test_repo_init_with_underscore_in_project_name() -> None:
"""
Test `feast init` with underscore in the project name
"""
with tempfile.TemporaryDirectory() as temp_dir:
temp_path = Path(temp_dir)
runner = CliRunner()
# `feast init` should fail with repo names start with underscore
invalid_repo_names = ["_test", "_test_1"]
for repo_name in invalid_repo_names:
result = runner.run(["init", repo_name], cwd=temp_path)
assert result.returncode != 0
# `feast init` should succeed with underscore in repo name
valid_repo_names = ["test_1"]
for repo_name in valid_repo_names:
result = runner.run(["init", repo_name], cwd=temp_path)
assert result.returncode == 0
# `feast apply` should fail with underscore in project name
project_name = "test_1"
repo_dir = temp_path / project_name
data_dir = repo_dir / "data"
repo_config = repo_dir / "feature_store.yaml"
repo_config.write_text(
dedent(
f"""
project: __foo
registry: {data_dir / "registry.db"}
provider: local
online_store:
path: {data_dir / "online_store.db"}
"""
)
)
result = runner.run(["apply"], cwd=repo_dir)
assert result.returncode != 0
| 1,001 |
335 | <filename>flutter_vlc_player/android/src/main/java/software/solid/fluttervlcplayer/Enums/HwAcc.java<gh_stars>100-1000
package software.solid.fluttervlcplayer.Enums;
public enum HwAcc {
AUTOMATIC(-1),
DISABLED(0),
DECODING(1),
FULL(2);
private int mType;
HwAcc (int type)
{
this.mType = type;
}
public int getNumericType() {
return mType;
}
}
| 184 |
592 | <filename>Project/LooseLeaf/MMScrapViewState.h
//
// MMScrapViewState.h
// LooseLeaf
//
// Created by <NAME> on 10/1/13.
// Copyright (c) 2013 Milestone Made, LLC. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <JotUI/JotUI.h>
#import "MMScrapBackgroundView.h"
#import "MMScrapViewStateDelegate.h"
#import "MMScrapCollectionState.h"
#import "MMDecompressImagePromiseDelegate.h"
@interface MMScrapViewState : NSObject <JotViewStateProxyDelegate, MMDecompressImagePromiseDelegate> {
// unloadable state
// this state can be loaded and unloaded
// to conserve memeory as needed
JotViewStateProxy* drawableViewState;
// delegate
__weak NSObject<MMScrapViewStateDelegate>* delegate;
// our owning paper
__weak MMScrapCollectionState* scrapsOnPaperState;
}
+ (BOOL)isImportExportScrapStateQueue;
@property (weak) NSObject<MMScrapViewStateDelegate>* delegate;
@property (readonly) UIBezierPath* bezierPath;
@property (readonly) CGSize originalSize;
@property (readonly) UIView* contentView;
@property (readonly) CGRect drawableBounds;
@property (readonly) NSString* uuid;
@property (readonly) JotView* drawableView;
@property (readonly) NSString* pathForScrapAssets;
@property (nonatomic, weak) MMScrapCollectionState* scrapsOnPaperState;
@property (nonatomic, readonly) int fullByteSize;
@property (readonly) NSUInteger lastSavedUndoHash;
- (id)initWithUUID:(NSString*)uuid andPaperState:(MMScrapCollectionState*)scrapsOnPaperState;
- (id)initWithUUID:(NSString*)uuid andBezierPath:(UIBezierPath*)bezierPath andPaperState:(MMScrapCollectionState*)scrapsOnPaperState;
- (void)saveScrapStateToDisk:(void (^)(BOOL hadEditsToSave))doneSavingBlock;
- (void)loadCachedScrapPreview;
- (void)unloadCachedScrapPreview;
- (UIImage*)oneOffLoadedThumbnailImage;
- (void)loadScrapStateAsynchronously:(BOOL)async;
- (void)unloadState;
- (void)unloadStateButKeepThumbnailIfAny;
- (BOOL)isScrapStateLoaded;
- (BOOL)isScrapStateLoading;
- (BOOL)hasEditsToSave;
- (UIImage*)activeThumbnailImage;
- (void)addElements:(NSArray*)elements withTexture:(JotBrushTexture*)texture;
- (void)addUndoLevelAndFinishStroke;
- (JotGLTexture*)generateTexture;
- (void)importTexture:(JotGLTexture*)texture atP1:(CGPoint)p1 andP2:(CGPoint)p2 andP3:(CGPoint)p3 andP4:(CGPoint)p4 withTextureSize:(CGSize)textureSize;
- (MMScrapBackgroundView*)backgroundView;
- (void)setBackgroundView:(MMScrapBackgroundView*)backgroundView;
- (CGPoint)currentCenterOfScrapBackground;
- (void)reloadBackgroundView;
- (UIView*)contentView;
@end
| 904 |
737 | <reponame>etnrlz/rtbkit
/* glz_classifier.cc
<NAME>, 6 August 2003
Copyright (c) 2003 <NAME>. All rights reserved.
$Source$
Implementation of the GLZ classifier.
*/
#include "glz_classifier.h"
#include "classifier_persist_impl.h"
#include "null_feature_space.h"
#include "dense_features.h"
#include "jml/algebra/irls.h"
#include <boost/timer.hpp>
#include "training_index.h"
#include "config_impl.h"
#include <limits>
#include "jml/utils/vector_utils.h"
#include "jml/compiler/compiler.h"
using namespace std;
using namespace ML::DB;
namespace ML {
/*****************************************************************************/
/* GLZ_CLASSIFIER */
/*****************************************************************************/
GLZ_Classifier::GLZ_Classifier()
: add_bias(true), link(LOGIT), optimized_(false)
{
}
GLZ_Classifier::
GLZ_Classifier(const std::shared_ptr<const Feature_Space> & fs,
const Feature & predicted)
: Classifier_Impl(fs, predicted), add_bias(true), link(LOGIT),
optimized_(false)
{
}
GLZ_Classifier::
GLZ_Classifier(DB::Store_Reader & store,
const std::shared_ptr<const Feature_Space> & fs)
{
reconstitute(store, fs);
}
GLZ_Classifier::
GLZ_Classifier(DB::Store_Reader & store)
{
reconstitute(store);
set_feature_space(std::shared_ptr<const Feature_Space>
(new Null_Feature_Space()));
}
GLZ_Classifier::~GLZ_Classifier()
{
}
distribution<float>
GLZ_Classifier::
extract(const Feature_Set & feature_set) const
{
distribution<float> result(features.size());
float NaN = std::numeric_limits<float>::quiet_NaN();
Feature_Set::const_iterator
prev_last = feature_set.begin(),
fend = feature_set.end();
for (unsigned i = 0; i < features.size(); ++i) {
const Feature & to_find = features[i].feature;
Feature_Set::const_iterator first, last;
// Optimization: assume that the features are there in the same order
// as we wanted to access them, so that we can simply step through
// rather than having to search for them each time.
if (prev_last != fend && prev_last.feature() == to_find) {
last = first = prev_last;
do {
++last;
} while (last != fend && last.feature() == to_find);
}
else {
boost::tie(first, last) = feature_set.find(features[i].feature);
}
prev_last = last;
switch (features[i].type) {
case Feature_Spec::VALUE_IF_PRESENT:
if (first == last || isnan((*first).second)) {
result[i] = NaN;
break;
}
// fall through
case Feature_Spec::VALUE:
if (last - first != 1)
throw Exception("GLZ_Classifier::decode() feature "
+ feature_space_->print(features[i].feature)
+ " occurred " + ostream_format(last - first)
+ " times; exactly 1 required");
if (isnan((*first).second))
throw Exception("GLZ_Classifier::decode() feature "
+ feature_space_->print(features[i].feature)
+ " was missing");
result[i] = (*first).second;
break;
case Feature_Spec::PRESENCE:
if (first == last || isnan((*first).second)) {
result[i] = NaN;
break;
}
result[i] = (*first).second;
break;
default:
throw Exception("GLZ_Classifier::decode(): invalid type");
}
}
return result;
}
distribution<float>
GLZ_Classifier::
decode(const Feature_Set & feature_set) const
{
distribution<float> result = extract(feature_set);
for (unsigned i = 0; i < result.size(); ++i) {
result[i] = decode_value(result[i], features[i]);
}
return result;
}
Label_Dist
GLZ_Classifier::predict(const Feature_Set & features,
PredictionContext * context) const
{
distribution<float> features_c = extract(features);
Label_Dist result = predict(features_c);
return result;
}
Label_Dist
GLZ_Classifier::predict(const distribution<float> & features_c,
PredictionContext * context) const
{
if (features_c.size() != features.size())
throw Exception("wrong number of features");
return do_predict_impl(&features_c[0], 0);
}
bool
GLZ_Classifier::
optimization_supported() const
{
return true;
}
bool
GLZ_Classifier::
predict_is_optimized() const
{
return optimized_;
}
bool
GLZ_Classifier::
optimize_impl(Optimization_Info & info)
{
feature_indexes.clear();
// Fill in the feature order
for (unsigned i = 0; i < features.size(); ++i) {
map<Feature, int>::const_iterator it
= info.feature_to_optimized_index.find(features[i].feature);
if (it == info.feature_to_optimized_index.end())
throw Exception("GLZ_Classifier::optimize(): feature not found");
feature_indexes.push_back(it->second);
}
return optimized_ = true;
}
Label_Dist
GLZ_Classifier::
optimized_predict_impl(const float * features_c,
const Optimization_Info & info,
PredictionContext * context) const
{
return do_predict_impl(features_c, &feature_indexes[0]);
}
void
GLZ_Classifier::
optimized_predict_impl(const float * features_c,
const Optimization_Info & info,
double * accum_out,
double weight,
PredictionContext * context) const
{
do_predict_impl(features_c, &feature_indexes[0], accum_out, weight);
}
float
GLZ_Classifier::
optimized_predict_impl(int label,
const float * features_c,
const Optimization_Info & info,
PredictionContext * context) const
{
return do_predict_impl(label, features_c, &feature_indexes[0]);
}
float
GLZ_Classifier::
decode_value(float feat_val, const Feature_Spec & spec) const
{
if (JML_UNLIKELY(isnan(feat_val))) {
switch (spec.type) {
case Feature_Spec::VALUE:
feat_val = 0.0;
#if 0
throw Exception("GLZ_Classifier: feature "
+ feature_space()->print(spec.feature)
+ " is missing");
#endif
case Feature_Spec::VALUE_IF_PRESENT:
case Feature_Spec::PRESENCE:
feat_val = 0.0;
break;
default:
throw Exception("invalid feature spec type");
}
}
else if (JML_UNLIKELY(spec.type == Feature_Spec::PRESENCE))
feat_val = 1.0;
else if (JML_UNLIKELY(!isfinite(feat_val)))
throw Exception("GLZ_Classifier: feature "
+ feature_space()->print(spec.feature)
+ " is not finite");
return feat_val;
}
double
GLZ_Classifier::
do_accum(const float * features_c,
const int * indexes,
int label) const
{
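// Linear predictor: weighted sum of the decoded feature values plus the
// optional bias term, mapped through the inverse link function.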
double accum = 0.0;
for (unsigned j = 0; j < features.size(); ++j) {
int idx = (indexes ? indexes[j] : j);
float feat_val = decode_value(features_c[idx], features[j]);
accum += feat_val * weights[label][j];
}
if (add_bias) accum += weights[label][features.size()];
//cerr << "do accum " << label << " = " << accum << endl;
return apply_link_inverse(accum, link);
}
Label_Dist
GLZ_Classifier::
do_predict_impl(const float * features_c,
const int * indexes) const
{
Label_Dist result(label_count());
for (unsigned i = 0; i < result.size(); ++i)
result[i] = do_accum(features_c, indexes, i);
return result;
}
void
GLZ_Classifier::
do_predict_impl(const float * features_c,
const int * indexes,
double * accum,
double weight) const
{
int nl = label_count();
for (unsigned i = 0; i < nl; ++i)
accum[i] += weight * do_accum(features_c, indexes, i);
}
float
GLZ_Classifier::
do_predict_impl(int label,
const float * features_c,
const int * indexes) const
{
return do_accum(features_c, indexes, label);
}
std::vector<ML::Feature>
GLZ_Classifier::
all_features() const
{
vector<ML::Feature> result;
for (unsigned i = 0; i < features.size(); ++i)
if (i == 0 || features[i].feature != result.back())
result.push_back(features[i].feature);
return result;
}
Output_Encoding
GLZ_Classifier::
output_encoding() const
{
if (label_count() == 1) return OE_PM_INF;
else return OE_PROB;
}
Explanation
GLZ_Classifier::
explain(const Feature_Set & feature_set,
int label,
double weight,
PredictionContext * context) const
{
Explanation result(feature_space(), weight);
for (unsigned j = 0; j < features.size(); ++j) {
float feat_val = decode_value(feature_set[features[j].feature],
features[j]);
result.feature_weights[features[j].feature]
+= weight * weights[label][j] * feat_val;
}
if (add_bias) result.bias += weight * weights[label][features.size()];
return result;
}
std::string GLZ_Classifier::print() const
{
return "GLZ_Classifier: link " + ML::print(link);
}
std::string GLZ_Classifier::summary() const
{
return "GLZ_Classifier: link " + ML::print(link);
}
namespace {
static const std::string GLZ_CLASSIFIER_MAGIC = "GLZ_CLASSIFIER";
static const compact_size_t GLZ_CLASSIFIER_VERSION = 3;
} // file scope
std::string GLZ_Classifier::class_id() const
{
return GLZ_CLASSIFIER_MAGIC;
}
void GLZ_Classifier::serialize(DB::Store_Writer & store) const
{
store << GLZ_CLASSIFIER_MAGIC << GLZ_CLASSIFIER_VERSION;
feature_space()->serialize(store, predicted_);
store << (int)add_bias << weights << link;
store << compact_size_t(features.size());
for (unsigned i = 0; i < features.size(); ++i) {
feature_space_->serialize(store, features[i].feature);
store << (char)features[i].type;
}
store << compact_size_t(0x12345);
}
void GLZ_Classifier::
reconstitute(DB::Store_Reader & store)
{
string magic;
compact_size_t version;
store >> magic >> version;
if (magic != GLZ_CLASSIFIER_MAGIC)
throw Exception("Attempt to reconstitute \"" + magic
+ "\" with boosted stumps reconstitutor");
if (version != GLZ_CLASSIFIER_VERSION)
throw Exception(format("Attemp to reconstitute GLZ classifier "
"version %zd, only %zd supported",
version.size_,
GLZ_CLASSIFIER_VERSION.size_));
if (!feature_space_)
throw Exception("GLZ_Classifier::reconstitute(): feature space not "
"initialised");
predicted_ = MISSING_FEATURE;
if (version >= 2) // added in version 2
feature_space()->reconstitute(store, predicted_);
int add_bias_; store >> add_bias_; add_bias = add_bias_;
store >> weights >> link;
compact_size_t nf(store);
features.resize(nf);
if (version < 3) {
for (unsigned i = 0; i < features.size(); ++i)
feature_space_->reconstitute(store, features[i].feature);
}
else {
for (unsigned i = 0; i < features.size(); ++i) {
feature_space_->reconstitute(store, features[i].feature);
char c; store >> c;
features[i].type = Feature_Spec::Type(c);
}
}
compact_size_t guard(store);
if (guard != 0x12345)
throw Exception("GLZ_Classifier::reconstitute(): bad guard value");
init(feature_space_, predicted_, weights.size());
optimized_ = false;
}
void GLZ_Classifier::
reconstitute(DB::Store_Reader & store,
const std::shared_ptr<const Feature_Space> & fs)
{
feature_space_ = fs;
reconstitute(store);
}
GLZ_Classifier * GLZ_Classifier::make_copy() const
{
return new GLZ_Classifier(*this);
}
/*****************************************************************************/
/* REGISTRATION */
/*****************************************************************************/
namespace {
Register_Factory<Classifier_Impl, GLZ_Classifier>
GLZC_REGISTER(GLZ_CLASSIFIER_MAGIC);
} // file scope
} // namespace ML
| 5,685 |
2,151 | // Copyright (c) 2016 The WebM project authors. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the LICENSE file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
#include "testing/test_util.h"
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <ios>
#include <string>
#include "common/libwebm_util.h"
#include "common/webmids.h"
#include "mkvparser/mkvparser.h"
#include "mkvparser/mkvreader.h"
namespace test {
std::string GetTestDataDir() {
const char* test_data_path = std::getenv("LIBWEBM_TEST_DATA_PATH");
return test_data_path ? std::string(test_data_path) : std::string();
}
std::string GetTestFilePath(const std::string& name) {
const std::string libwebm_testdata_dir = GetTestDataDir();
return libwebm_testdata_dir + "/" + name;
}
bool CompareFiles(const std::string& file1, const std::string& file2) {
const std::size_t kBlockSize = 4096;
std::uint8_t buf1[kBlockSize] = {0};
std::uint8_t buf2[kBlockSize] = {0};
libwebm::FilePtr f1 =
libwebm::FilePtr(std::fopen(file1.c_str(), "rb"), libwebm::FILEDeleter());
libwebm::FilePtr f2 =
libwebm::FilePtr(std::fopen(file2.c_str(), "rb"), libwebm::FILEDeleter());
if (!f1.get() || !f2.get()) {
// Files cannot match if one or both couldn't be opened.
return false;
}
do {
const std::size_t r1 = std::fread(buf1, 1, kBlockSize, f1.get());
const std::size_t r2 = std::fread(buf2, 1, kBlockSize, f2.get());
// TODO(fgalligan): Add output of which byte differs.
if (r1 != r2 || std::memcmp(buf1, buf2, r1)) {
      return false;  // Files are not equal
}
} while (!std::feof(f1.get()) && !std::feof(f2.get()));
return std::feof(f1.get()) && std::feof(f2.get());
}
bool HasCuePoints(const mkvparser::Segment* segment,
std::int64_t* cues_offset) {
if (!segment || !cues_offset) {
return false;
}
using mkvparser::SeekHead;
const SeekHead* const seek_head = segment->GetSeekHead();
if (!seek_head) {
return false;
}
std::int64_t offset = 0;
for (int i = 0; i < seek_head->GetCount(); ++i) {
const SeekHead::Entry* const entry = seek_head->GetEntry(i);
if (entry->id == libwebm::kMkvCues) {
offset = entry->pos;
}
}
if (offset <= 0) {
// No Cues found.
return false;
}
*cues_offset = offset;
return true;
}
bool ValidateCues(mkvparser::Segment* segment, mkvparser::IMkvReader* reader) {
if (!segment) {
return false;
}
std::int64_t cues_offset = 0;
if (!HasCuePoints(segment, &cues_offset)) {
// No cues to validate, everything is OK.
return true;
}
// Parse Cues.
long long cues_pos = 0; // NOLINT
long cues_len = 0; // NOLINT
if (segment->ParseCues(cues_offset, cues_pos, cues_len)) {
return false;
}
// Get a pointer to the video track if it exists. Otherwise, we assume
// that Cues are based on the first track (which is true for all our test
// files).
const mkvparser::Tracks* const tracks = segment->GetTracks();
const mkvparser::Track* cues_track = tracks->GetTrackByIndex(0);
for (int i = 1; i < static_cast<int>(tracks->GetTracksCount()); ++i) {
const mkvparser::Track* const track = tracks->GetTrackByIndex(i);
if (track->GetType() == mkvparser::Track::kVideo) {
cues_track = track;
break;
}
}
// Iterate through Cues and verify if they are pointing to the correct
// Cluster position.
const mkvparser::Cues* const cues = segment->GetCues();
const mkvparser::CuePoint* cue_point = NULL;
while (cues->LoadCuePoint()) {
if (!cue_point) {
cue_point = cues->GetFirst();
} else {
cue_point = cues->GetNext(cue_point);
}
const mkvparser::CuePoint::TrackPosition* const track_position =
cue_point->Find(cues_track);
const long long cluster_pos = track_position->m_pos + // NOLINT
segment->m_start;
// If a cluster does not begin at |cluster_pos|, then the file is
// incorrect.
long length; // NOLINT
const std::int64_t id = mkvparser::ReadID(reader, cluster_pos, length);
if (id != libwebm::kMkvCluster) {
return false;
}
}
return true;
}
MkvParser::~MkvParser() {
delete segment;
delete reader;
}
bool ParseMkvFileReleaseParser(const std::string& webm_file,
MkvParser* parser_out) {
parser_out->reader = new (std::nothrow) mkvparser::MkvReader;
mkvparser::MkvReader& reader = *parser_out->reader;
if (!parser_out->reader || reader.Open(webm_file.c_str()) < 0) {
return false;
}
long long pos = 0; // NOLINT
mkvparser::EBMLHeader ebml_header;
if (ebml_header.Parse(&reader, pos)) {
return false;
}
using mkvparser::Segment;
Segment* segment_ptr = nullptr;
if (Segment::CreateInstance(&reader, pos, segment_ptr)) {
return false;
}
std::unique_ptr<Segment> segment(segment_ptr);
long result;
if ((result = segment->Load()) < 0) {
return false;
}
const mkvparser::Cluster* cluster = segment->GetFirst();
if (!cluster || cluster->EOS()) {
return false;
}
while (cluster && cluster->EOS() == false) {
if (cluster->GetTimeCode() < 0) {
return false;
}
const mkvparser::BlockEntry* block = nullptr;
if (cluster->GetFirst(block) < 0) {
return false;
}
while (block != NULL && block->EOS() == false) {
if (cluster->GetNext(block, block) < 0) {
return false;
}
}
cluster = segment->GetNext(cluster);
}
parser_out->segment = segment.release();
return true;
}
bool ParseMkvFile(const std::string& webm_file) {
MkvParser parser;
const bool result = ParseMkvFileReleaseParser(webm_file, &parser);
delete parser.segment;
delete parser.reader;
return result;
}
} // namespace test
| 2,350 |
442 | #ifndef _READPASS_H_
#define _READPASS_H_
/* Avoid namespace collisions with other "readpass" functions. */
#ifdef readpass
#undef readpass
#endif
#define readpass libcperciva_readpass
/**
* readpass(passwd, prompt, confirmprompt, devtty):
* If ${devtty} is 0, read a password from stdin. If ${devtty} is 1, read a
* password from /dev/tty if possible; if not, read from stdin. If ${devtty}
* is 2, read a password from /dev/tty if possible; if not, exit with an error.
* If reading from a tty (either /dev/tty or stdin), disable echo and prompt
* the user by printing ${prompt} to stderr. If ${confirmprompt} is non-NULL,
* read a second password (prompting if a terminal is being used) and repeat
* until the user enters the same password twice. Return the password as a
* malloced NUL-terminated string via ${passwd}.
*/
int readpass(char **, const char *, const char *, int);
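/*
 * A minimal usage sketch (illustrative only, not part of the original header).
 * It assumes a POSIX-style caller and the usual 0-on-success convention; only
 * the readpass() prototype above is taken from this file.
 *
 *     char * passwd;
 *
 *     if (readpass(&passwd, "Passphrase: ", "Confirm passphrase: ", 1) != 0)
 *         exit(1);
 *     // ... use passwd ...
 *     memset(passwd, 0, strlen(passwd));
 *     free(passwd);
 */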
/**
* readpass_file(passwd, filename):
* Read a passphrase from ${filename} and return it as a malloced
* NUL-terminated string via ${passwd}. Print an error and fail if the file
* is 2048 characters or more, or if it contains any newline \n or \r\n
* characters other than at the end of the file. Do not include the \n or
* \r\n characters in the passphrase.
*/
int readpass_file(char **, const char *);
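/*
 * A similar sketch for readpass_file() (hypothetical file path, illustrative
 * only; again assumes the usual 0-on-success convention):
 *
 *     char * passwd;
 *
 *     if (readpass_file(&passwd, "/path/to/passphrase.txt") != 0)
 *         exit(1);
 */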
#endif /* !_READPASS_H_ */
| 420 |
14,668 | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_CORE_FRAGMENT_DIRECTIVE_CSS_SELECTOR_DIRECTIVE_H_
#define THIRD_PARTY_BLINK_RENDERER_CORE_FRAGMENT_DIRECTIVE_CSS_SELECTOR_DIRECTIVE_H_
#include "third_party/blink/renderer/core/frame/directive.h"
#include "third_party/blink/renderer/platform/weborigin/kurl.h"
namespace blink {
// Parses directive string and extracts value part of CssSelectorDirective
// to be used with QuerySelector() for finding the element
// https://github.com/WICG/scroll-to-text-fragment/blob/main/EXTENSIONS.md#proposed-solution
// TODO(crbug/1265423): Rename to SelectorDirective
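// For illustration, a directive string of roughly the following shape would be
// handled here (assumed form, based on the explainer linked above; not taken
// from this header):
//   selector(type=CssSelector,value=img[src$="example.org"])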
class CssSelectorDirective : public Directive {
public:
static CssSelectorDirective* TryParse(const String& directive_string);
static Type ClassType() { return kSelector; }
explicit CssSelectorDirective(const String& value);
const AtomicString value() const { return value_; }
protected:
String ToStringImpl() const override;
private:
  // An accepted CSS selector string specified in the directive's value field.
AtomicString value_;
};
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_CORE_FRAGMENT_DIRECTIVE_CSS_SELECTOR_DIRECTIVE_H_
| 437 |
1,131 | <reponame>Jerome226/Create<filename>src/main/java/com/simibubi/create/content/contraptions/wrench/WrenchModel.java
package com.simibubi.create.content.contraptions.wrench;
import com.simibubi.create.foundation.item.render.CreateCustomRenderedItemModel;
import net.minecraft.client.renderer.model.IBakedModel;
public class WrenchModel extends CreateCustomRenderedItemModel {
public WrenchModel(IBakedModel template) {
super(template, "wrench");
addPartials("gear");
}
}
| 157 |
622 | """
Tabular data as a nested list.
"""
# Programming language popularity, from www.tiobe.com/tiobe-index
popularity = [["Language", 2017, 2012, 2007, 2002, 1997, 1992, 1987],
["Java", 1, 2, 1, 1, 15, 0, 0],
["C", 2, 1, 2, 2, 1, 1, 1],
["C++", 3, 3, 3, 3, 2, 2, 5],
["C#", 4, 4, 7, 13, 0, 0, 0],
["Python", 5, 7, 6, 11, 27, 0, 0],
["Visual Basic .NET", 6, 17, 0, 0, 0, 0, 0],
["PHP", 7, 6, 4, 5, 0, 0, 0],
["JavaScript", 8, 9, 8, 7, 23, 0, 0],
["Perl", 9, 8, 5, 4, 4, 10, 0]]
format_string = "{:<20} {:>4} {:>4} {:>4} {:>4} {:>4} {:>4} {:>4}"
# Display languages table
headers = popularity[0]
header_row = format_string.format(*headers)
print(header_row)
print("-" * len(header_row))
for language in popularity[1:]:
print(format_string.format(*language))
print("")
# Finding/selecting items
# What was Python's popularity in 1997?
print("Python's popularity in 1997:", popularity[5][5])
def find_col(table, col):
"""
Return column index with col header in table
or -1 if col is not in table
"""
    if col in table[0]:
        return table[0].index(col)
    return -1
def find_row(table, row):
"""
Return row index with row header in table
or -1 if row is not in table
"""
for idx in range(len(table)):
if table[idx][0] == row:
return idx
return -1
idx1997 = find_col(popularity, 1997)
idxpython = find_row(popularity, "Python")
print("Python's popularity in 1997:", popularity[idxpython][idx1997])
| 716 |
375 | <reponame>obrienben/openwayback
/*
* This file is part of the Wayback archival access software
* (http://archive-access.sourceforge.net/projects/wayback/).
*
* Licensed to the Internet Archive (IA) by one or more individual
* contributors.
*
* The IA licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.archive.wayback.archivalurl;
import org.archive.wayback.RequestParser;
import org.archive.wayback.archivalurl.requestparser.ArchivalUrlFormRequestParser;
import org.archive.wayback.archivalurl.requestparser.DatelessReplayRequestParser;
import org.archive.wayback.archivalurl.requestparser.PathDatePrefixQueryRequestParser;
import org.archive.wayback.archivalurl.requestparser.PathDateRangeQueryRequestParser;
import org.archive.wayback.archivalurl.requestparser.PathPrefixDatePrefixQueryRequestParser;
import org.archive.wayback.archivalurl.requestparser.PathPrefixDateRangeQueryRequestParser;
import org.archive.wayback.archivalurl.requestparser.ReplayRequestParser;
import org.archive.wayback.memento.TimeMapRequestParser;
import org.archive.wayback.requestparser.CompositeRequestParser;
import org.archive.wayback.requestparser.OpenSearchRequestParser;
/**
* CompositeRequestParser that handles Archival Url Replay and Query requests,
* in addition to "standard" OpenSearch and Form RequestParsers.
*
 * <p>Typically, this class is set on an AccessPoint to configure an ArchivalUrl
 * access point.</p>
*
* <h4>Refactoring Note</h4>
 * <p>It doesn't make much sense to split Archival-Url request parsing
 * into such fine-grained classes. It just makes the code less efficient and more difficult to maintain.
 * It is hard to think of a situation where these sub-parsers would be customized separately, and
 * their order is very important.
 * It also seems odd to include TimeMapRequestParser in <em>ArchivalUrlRequestParser</em>, even though
 * TimeMapRequestParser works only in Archival-Url space.
 * Refactor these classes into one ArchivalUrl replay/query request parser, and rename this class.
 * This class could probably be bundled with other ArchivalUrl-related classes for better abstraction.</p>
*
* @see org.archive.wayback.webapp.AccessPoint#getParser
* @see org.archive.wayback.webapp.AccessPoint#handleRequest
* @author brad
*/
public class ArchivalUrlRequestParser extends CompositeRequestParser {
// TODO: move these constants to where they are actually used.
// ArchivalUrl? ReplayRequestParser?
/**
* delimiter character for datespec flags
*/
public final static String FLAG_DELIM = "_";
/**
* text/javascript context
*/
public final static String JS_CONTEXT = "js";
/**
* text/css context
*/
public final static String CSS_CONTEXT = "cs";
/**
* image/* context
*/
public final static String IMG_CONTEXT = "im";
/**
* raw/identity context
*/
public final static String IDENTITY_CONTEXT = "id";
/**
* frame-wrapper context
*/
public final static String FRAME_WRAPPED_CONTEXT = "fw";
/**
* iframe-wrapped context
*/
public final static String IFRAME_WRAPPED_CONTEXT = "if";
/**
* object/embed wrapped context
*/
public final static String OBJECT_EMBED_WRAPPED_CONTEXT = "oe";
/**
* Charset detection strategy context - should be followed by an integer
* indicating which strategy to use
*/
public final static String CHARSET_MODE = "cm";
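    /*
     * For illustration (a hypothetical URL, not taken from this class): in an
     * archival URL such as
     *
     *   http://wayback.example.org/web/20110101010101js_/http://example.com/app.js
     *
     * the 14-digit datespec is followed by the JS_CONTEXT flag ("js") and the
     * FLAG_DELIM ("_"), marking the capture for replay in a text/javascript
     * context.
     */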
protected RequestParser[] getRequestParsers() {
RequestParser[] theParsers = {
new ReplayRequestParser(this),
new TimeMapRequestParser(this),
new PathDatePrefixQueryRequestParser(this),
new PathDateRangeQueryRequestParser(this),
new PathPrefixDatePrefixQueryRequestParser(this),
new PathPrefixDateRangeQueryRequestParser(this),
new OpenSearchRequestParser(this),
new ArchivalUrlFormRequestParser(this),
new DatelessReplayRequestParser(this)
};
return theParsers;
}
}
| 1,280 |
1,093 | /*
* Copyright 2002-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.integration.jms.config;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.integration.channel.MessagePublishingErrorHandler;
import org.springframework.integration.channel.NullChannel;
import org.springframework.integration.channel.PublishSubscribeChannel;
import org.springframework.integration.context.IntegrationContextUtils;
import org.springframework.integration.test.util.TestUtils;
import org.springframework.messaging.MessageChannel;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import org.springframework.util.ErrorHandler;
/**
* @author <NAME>
* @author <NAME>
*
* @since 1.0.3
*/
@SpringJUnitConfig
@DirtiesContext
public class DefaultConfigurationTests {
@Autowired
private ApplicationContext context;
@Test
public void verifyErrorChannel() {
Object errorChannel = context.getBean("errorChannel");
assertThat(errorChannel).isNotNull();
assertThat(errorChannel.getClass()).isEqualTo(PublishSubscribeChannel.class);
}
@Test
public void verifyNullChannel() {
Object nullChannel = context.getBean("nullChannel");
assertThat(nullChannel).isNotNull();
assertThat(nullChannel.getClass()).isEqualTo(NullChannel.class);
}
@Test
public void verifyTaskScheduler() {
Object taskScheduler = context.getBean(IntegrationContextUtils.TASK_SCHEDULER_BEAN_NAME);
assertThat(taskScheduler.getClass()).isEqualTo(ThreadPoolTaskScheduler.class);
ErrorHandler errorHandler = TestUtils.getPropertyValue(taskScheduler, "errorHandler", ErrorHandler.class);
assertThat(errorHandler.getClass()).isEqualTo(MessagePublishingErrorHandler.class);
MessageChannel defaultErrorChannel = TestUtils.getPropertyValue(errorHandler,
"messagingTemplate.defaultDestination", MessageChannel.class);
assertThat(defaultErrorChannel).isNull();
errorHandler.handleError(new Throwable());
defaultErrorChannel = TestUtils.getPropertyValue(errorHandler, "messagingTemplate.defaultDestination",
MessageChannel.class);
assertThat(defaultErrorChannel).isNotNull();
assertThat(defaultErrorChannel).isEqualTo(context.getBean(IntegrationContextUtils.ERROR_CHANNEL_BEAN_NAME));
}
}
| 912 |
3,102 | <gh_stars>1000+
//
// Verify -fgnuc-version parsing
//
// RUN: %clang -c %s -target i686-linux -### 2>&1 | FileCheck %s -check-prefix GNUC-DEFAULT
// GNUC-DEFAULT: "-fgnuc-version=4.2.1"
// RUN: %clang -c %s -target i686-linux -fgnuc-version=100.99.99 -### 2>&1 | FileCheck %s -check-prefix GNUC-OVERRIDE
// GNUC-OVERRIDE: "-fgnuc-version=100.99.99"
// RUN: %clang -c %s -target i686-linux -fgnuc-version=0 -### 2>&1 | FileCheck %s -check-prefix GNUC-DISABLE
// RUN: %clang -c %s -target i686-linux -fgnuc-version= -### 2>&1 | FileCheck %s -check-prefix GNUC-DISABLE
// GNUC-DISABLE-NOT: "-fgnuc-version=
// RUN: not %clang -c %s -target i686-linux -fgnuc-version=100.100.10 2>&1 | FileCheck %s -check-prefix GNUC-INVALID
// RUN: not %clang -c %s -target i686-linux -fgnuc-version=100.10.100 2>&1 | FileCheck %s -check-prefix GNUC-INVALID
// RUN: not %clang -c %s -target i686-linux -fgnuc-version=-1.0.0 2>&1 | FileCheck %s -check-prefix GNUC-INVALID
// GNUC-INVALID: error: invalid value {{.*}} in '-fgnuc-version={{.*}}'
// RUN: %clang -fgnuc-version=100.99.99 %s -dM -E -o - | FileCheck %s -check-prefix GNUC-LARGE
// GNUC-LARGE: #define __GNUC_MINOR__ 99
// GNUC-LARGE: #define __GNUC_PATCHLEVEL__ 99
// GNUC-LARGE: #define __GNUC__ 100
// RUN: %clang -fgnuc-version=100.99.99 -x c++ %s -dM -E -o - | FileCheck %s -check-prefix GXX-LARGE
// GXX-LARGE: #define __GNUG__ 100
| 604 |
602 | <filename>test/src/test/java/org/corfudb/runtime/view/ManagementViewTest.java
package org.corfudb.runtime.view;
import com.google.common.reflect.TypeToken;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.corfudb.infrastructure.SequencerServer;
import org.corfudb.infrastructure.ServerContext;
import org.corfudb.infrastructure.ServerContextBuilder;
import org.corfudb.infrastructure.TestLayoutBuilder;
import org.corfudb.infrastructure.TestServerRouter;
import org.corfudb.protocols.wireprotocol.NodeState;
import org.corfudb.protocols.wireprotocol.SequencerMetrics.SequencerStatus;
import org.corfudb.protocols.wireprotocol.TokenResponse;
import org.corfudb.protocols.wireprotocol.failuredetector.NodeConnectivity.NodeConnectivityType;
import org.corfudb.runtime.CorfuRuntime;
import org.corfudb.runtime.clients.TestRule;
import org.corfudb.runtime.collections.CorfuTable;
import org.corfudb.runtime.collections.ICorfuTable;
import org.corfudb.runtime.exceptions.AbortCause;
import org.corfudb.runtime.exceptions.ServerNotReadyException;
import org.corfudb.runtime.exceptions.TransactionAbortedException;
import org.corfudb.runtime.object.ICorfuSMR;
import org.corfudb.runtime.proto.service.CorfuMessage.RequestPayloadMsg.PayloadCase;
import org.corfudb.runtime.proto.service.CorfuMessage.ResponsePayloadMsg;
import org.corfudb.runtime.view.ClusterStatusReport.ClusterStatus;
import org.corfudb.runtime.view.ClusterStatusReport.ConnectivityStatus;
import org.corfudb.runtime.view.ClusterStatusReport.NodeStatus;
import org.corfudb.runtime.view.stream.IStreamView;
import org.junit.Assert;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.corfudb.test.TestUtils.setAggressiveTimeouts;
import static org.corfudb.test.TestUtils.waitForLayoutChange;
import static org.junit.Assert.fail;
/**
* Test to verify the Management Server functionalities.
*
* Created by zlokhandwala on 11/9/16.
*/
@Slf4j
public class ManagementViewTest extends AbstractViewTest {
@Getter
protected CorfuRuntime corfuRuntime = null;
private void waitForSequencerToBootstrap(int primarySequencerPort) throws InterruptedException {
// Waiting for sequencer to be bootstrapped
for (int i = 0; i < PARAMETERS.NUM_ITERATIONS_MODERATE; i++) {
if (getSequencer(primarySequencerPort).getSequencerEpoch() != Layout.INVALID_EPOCH) {
return;
}
TimeUnit.MILLISECONDS.sleep(PARAMETERS.TIMEOUT_SHORT.toMillis());
}
Assert.fail();
}
/**
* Scenario with 2 nodes: SERVERS.PORT_0 and SERVERS.PORT_1.
* We fail SERVERS.PORT_0 and then listen to intercept the message
* sent by SERVERS.PORT_1's client to the server to handle the failure.
*
* @throws Exception
*/
@Test
public void invokeFailureHandler()
throws Exception {
// Boolean flag turned to true when the REPORT_FAILURE message
// is sent by the Management client to its server.
final Semaphore failureDetected = new Semaphore(1, true);
addServer(SERVERS.PORT_0);
addServer(SERVERS.PORT_1);
Layout l = new TestLayoutBuilder()
.setEpoch(1L)
.addLayoutServer(SERVERS.PORT_0)
.addLayoutServer(SERVERS.PORT_1)
.addSequencer(SERVERS.PORT_1)
.buildSegment()
.buildStripe()
.addLogUnit(SERVERS.PORT_0)
.addLogUnit(SERVERS.PORT_1)
.addToSegment()
.addToLayout()
.build();
bootstrapAllServers(l);
// Shutting down causes loss of heartbeat requests and responses from this node.
getManagementServer(SERVERS.PORT_0).shutdown();
CorfuRuntime corfuRuntime = getRuntime(l).connect();
// Set aggressive timeouts.
setAggressiveTimeouts(l, corfuRuntime,
getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime());
failureDetected.acquire();
// Adding a rule on SERVERS.PORT_0 to drop all packets
addServerRule(SERVERS.PORT_0, new TestRule().always().drop());
// Adding a rule on SERVERS.PORT_1 to toggle the flag when it sends the
// REPORT_FAILURE message.
addClientRule(getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
new TestRule().requestMatches(msg -> {
if (msg.getPayload().getPayloadCase().equals(PayloadCase.REPORT_FAILURE_REQUEST)) {
failureDetected.release();
}
return true;
}));
assertThat(failureDetected.tryAcquire(PARAMETERS.TIMEOUT_LONG.toNanos(),
TimeUnit.NANOSECONDS)).isEqualTo(true);
}
/**
* Scenario with 3 nodes: SERVERS.PORT_0, SERVERS.PORT_1 and SERVERS.PORT_2.
* We fail SERVERS.PORT_1 and then wait for one of the other two servers to
* handle this failure, propose a new layout. The test asserts on a stable
* layout. The failure is handled by removing the failed node.
*/
@Test
public void removeSingleNodeFailure() {
addServer(SERVERS.PORT_0);
addServer(SERVERS.PORT_1);
addServer(SERVERS.PORT_2);
Layout l = new TestLayoutBuilder()
.setEpoch(1L)
.addLayoutServer(SERVERS.PORT_0)
.addLayoutServer(SERVERS.PORT_1)
.addLayoutServer(SERVERS.PORT_2)
.addSequencer(SERVERS.PORT_0)
.buildSegment()
.buildStripe()
.addLogUnit(SERVERS.PORT_0)
.addLogUnit(SERVERS.PORT_2)
.addToSegment()
.addToLayout()
.build();
bootstrapAllServers(l);
CorfuRuntime corfuRuntime = getRuntime(l).connect();
// Setting aggressive timeouts for connect, retry, response timeouts
setAggressiveTimeouts(l, corfuRuntime,
getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(),
getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime());
// Setting aggressive timeouts for failure and healing detectors
setAggressiveDetectorTimeouts(SERVERS.PORT_0, SERVERS.PORT_1, SERVERS.PORT_2);
// Adding a rule on SERVERS.PORT_1 to drop all packets
addServerRule(SERVERS.PORT_1, new TestRule().always().drop());
getManagementServer(SERVERS.PORT_1).shutdown();
// Waiting until a stable layout is committed
waitForLayoutChange(layout -> layout.getUnresponsiveServers().contains(SERVERS.ENDPOINT_1) &&
layout.getUnresponsiveServers().size() == 1,
corfuRuntime);
// Verifying layout and remove of failed server
Layout l2 = corfuRuntime.getLayoutView().getLayout();
assertThat(l2.getEpoch()).isGreaterThan(l.getEpoch());
assertThat(l2.getLayoutServers().size()).isEqualTo(l.getAllServers().size());
assertThat(l2.getAllActiveServers().size()).isEqualTo(l.getAllServers().size() - 1);
assertThat(l2.getUnresponsiveServers()).contains(SERVERS.ENDPOINT_1);
}
private Layout getManagementTestLayout() throws InterruptedException {
addServer(SERVERS.PORT_0);
addServer(SERVERS.PORT_1);
addServer(SERVERS.PORT_2);
Layout l = new TestLayoutBuilder()
.setEpoch(1L)
.addLayoutServer(SERVERS.PORT_0)
.addLayoutServer(SERVERS.PORT_1)
.addLayoutServer(SERVERS.PORT_2)
.addSequencer(SERVERS.PORT_0)
.addSequencer(SERVERS.PORT_1)
.addSequencer(SERVERS.PORT_2)
.buildSegment()
.buildStripe()
.addLogUnit(SERVERS.PORT_1)
.addLogUnit(SERVERS.PORT_2)
.addToSegment()
.addToLayout()
.setClusterId(UUID.randomUUID())
.build();
bootstrapAllServers(l);
corfuRuntime = getRuntime(l).connect();
waitForSequencerToBootstrap(SERVERS.PORT_0);
// Setting aggressive timeouts
setAggressiveTimeouts(l, corfuRuntime,
getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(),
getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime());
setAggressiveDetectorTimeouts(SERVERS.PORT_0, SERVERS.PORT_1, SERVERS.PORT_2);
return l;
}
private Layout get3NodeLayout() throws InterruptedException {
addServer(SERVERS.PORT_0);
addServer(SERVERS.PORT_1);
addServer(SERVERS.PORT_2);
Layout l = new TestLayoutBuilder()
.setEpoch(1L)
.addLayoutServer(SERVERS.PORT_0)
.addLayoutServer(SERVERS.PORT_1)
.addLayoutServer(SERVERS.PORT_2)
.addSequencer(SERVERS.PORT_0)
.addSequencer(SERVERS.PORT_1)
.addSequencer(SERVERS.PORT_2)
.buildSegment()
.buildStripe()
.addLogUnit(SERVERS.PORT_0)
.addLogUnit(SERVERS.PORT_1)
.addLogUnit(SERVERS.PORT_2)
.addToSegment()
.addToLayout()
.setClusterId(UUID.randomUUID())
.build();
bootstrapAllServers(l);
corfuRuntime = getRuntime(l).connect();
waitForSequencerToBootstrap(SERVERS.PORT_0);
// Setting aggressive timeouts
setAggressiveTimeouts(l, corfuRuntime,
getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(),
getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime());
setAggressiveDetectorTimeouts(SERVERS.PORT_0, SERVERS.PORT_1, SERVERS.PORT_2);
return l;
}
/**
* Scenario with 1 node: SERVERS.PORT_0
     * The node is set up, bootstrapped and then sent a heartbeat request.
     * The response carries the node's state, which is asserted against the
     * expected values.
*
* @throws Exception error
*/
@Test
public void checkNodeState() throws Exception {
addServer(SERVERS.PORT_0);
Layout l = new TestLayoutBuilder()
.setEpoch(1L)
.addLayoutServer(SERVERS.PORT_0)
.addSequencer(SERVERS.PORT_0)
.buildSegment()
.buildStripe()
.addLogUnit(SERVERS.PORT_0)
.addToSegment()
.addToLayout()
.build();
bootstrapAllServers(l);
CorfuRuntime corfuRuntime = getRuntime(l).connect();
// Set aggressive timeouts.
setAggressiveTimeouts(l, corfuRuntime,
getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime());
        NodeState nodeState = null;
// Send heartbeat requests and wait until we get a valid response.
for (int i = 0; i < PARAMETERS.NUM_ITERATIONS_LOW; i++) {
nodeState = corfuRuntime.getLayoutView().getRuntimeLayout()
.getManagementClient(SERVERS.ENDPOINT_0).sendNodeStateRequest().get();
if (nodeState.getConnectivity().getType() == NodeConnectivityType.CONNECTED
&& !nodeState.getConnectivity().getConnectedNodes().isEmpty()) {
break;
}
TimeUnit.MILLISECONDS.sleep(PARAMETERS.TIMEOUT_VERY_SHORT.toMillis());
}
assertThat(nodeState).isNotNull();
assertThat(nodeState.getConnectivity()).isNotNull();
assertThat(nodeState.getConnectivity().getConnectedNodes()).contains(SERVERS.ENDPOINT_0);
}
/**
* Scenario with 3 nodes: SERVERS.PORT_0, SERVERS.PORT_1 and SERVERS.PORT_2.
* Simulate transient failure of a server leading to a partial seal.
* Allow the management server to detect the partial seal and correct this.
* <p>
* Part 1.
     * The partial seal causes SERVERS.PORT_0 to be at epoch 2, whereas
     * SERVERS.PORT_1 & SERVERS.PORT_2 fail to receive this message and are stuck at epoch 1.
* <p>
* Part 2.
* All the 3 servers are now functional and receive all messages.
* <p>
* Part 3.
* The PING message gets rejected by the partially sealed router (WrongEpoch)
     * and the management server becomes aware of the partial seal and corrects this
* by issuing another failure detected message.
*
* @throws Exception
*/
@Test
public void handleTransientFailure() throws Exception {
log.info("Boolean flag turned to true when the REPORT_FAILURE_REQUEST message " +
"is sent by the Management client to its server");
final Semaphore failureDetected = new Semaphore(2, true);
addServer(SERVERS.PORT_0);
addServer(SERVERS.PORT_1);
addServer(SERVERS.PORT_2);
Layout l = new TestLayoutBuilder()
.setEpoch(1L)
.addLayoutServer(SERVERS.PORT_0)
.addLayoutServer(SERVERS.PORT_1)
.addLayoutServer(SERVERS.PORT_2)
.addSequencer(SERVERS.PORT_0)
.addSequencer(SERVERS.PORT_1)
.addSequencer(SERVERS.PORT_2)
.buildSegment()
.buildStripe()
.addLogUnit(SERVERS.PORT_0)
.addLogUnit(SERVERS.PORT_1)
.addLogUnit(SERVERS.PORT_2)
.addToSegment()
.addToLayout()
.setClusterId(UUID.randomUUID())
.build();
bootstrapAllServers(l);
corfuRuntime = getRuntime(l).connect();
waitForSequencerToBootstrap(SERVERS.PORT_0);
log.info("Setting aggressive timeouts");
setAggressiveTimeouts(l, corfuRuntime,
getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(),
getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime());
setAggressiveDetectorTimeouts(SERVERS.PORT_0, SERVERS.PORT_1, SERVERS.PORT_2);
failureDetected.acquire(2);
log.info("Only allow SERVERS.PORT_0 to manage failures. Prevent the other servers from handling failures.");
TestRule sealTestRule = new TestRule()
.requestMatches(msg -> msg.getPayload().getPayloadCase().equals(PayloadCase.SEAL_REQUEST))
.drop();
TestRule failureTestRule = new TestRule()
.requestMatches(msg -> msg.getPayload().getPayloadCase().equals(PayloadCase.REPORT_FAILURE_REQUEST))
.drop();
addClientRule(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_1, sealTestRule);
addClientRule(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_1, failureTestRule);
addClientRule(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_2, sealTestRule);
addClientRule(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_2, failureTestRule);
addClientRule(getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_1, sealTestRule);
addClientRule(getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_1, failureTestRule);
addClientRule(getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_2, sealTestRule);
addClientRule(getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_2, failureTestRule);
addClientRule(getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_1, sealTestRule);
addClientRule(getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_1, failureTestRule);
addClientRule(getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_2, sealTestRule);
addClientRule(getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_2, failureTestRule);
// PART 1.
log.info("Simulate ENDPOINT_2 failure from ENDPOINT_1 (only Management Server)");
addClientRule(getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
SERVERS.ENDPOINT_2, new TestRule().always().drop());
log.info("Adding a rule on SERVERS.PORT_1 to toggle the flag when it " +
"sends the REPORT_FAILURE_REQUEST message.");
addClientRule(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(),
new TestRule().requestMatches(msg -> {
if (msg.getPayload().getPayloadCase().equals(PayloadCase.REPORT_FAILURE_REQUEST)) {
failureDetected.release();
}
return true;
}));
log.info("Go ahead when sealing of ENDPOINT_0 takes place.");
for (int i = 0; i < PARAMETERS.NUM_ITERATIONS_MODERATE; i++) {
if (getServerRouter(SERVERS.PORT_0).getServerEpoch() == 2L) {
failureDetected.release();
break;
}
Thread.sleep(PARAMETERS.TIMEOUT_VERY_SHORT.toMillis());
}
assertThat(failureDetected.tryAcquire(2, PARAMETERS.TIMEOUT_NORMAL.toNanos(),
TimeUnit.NANOSECONDS)).isEqualTo(true);
addClientRule(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(), failureTestRule);
log.info("Assert that only a partial seal was successful. " +
"ENDPOINT_0 sealed. ENDPOINT_1 & ENDPOINT_2 not sealed."
);
assertThat(getServerRouter(SERVERS.PORT_0).getServerEpoch()).isEqualTo(2L);
assertThat(getServerRouter(SERVERS.PORT_1).getServerEpoch()).isEqualTo(1L);
assertThat(getServerRouter(SERVERS.PORT_2).getServerEpoch()).isEqualTo(1L);
assertThat(getLayoutServer(SERVERS.PORT_0).getCurrentLayout().getEpoch()).isEqualTo(1L);
assertThat(getLayoutServer(SERVERS.PORT_1).getCurrentLayout().getEpoch()).isEqualTo(1L);
assertThat(getLayoutServer(SERVERS.PORT_2).getCurrentLayout().getEpoch()).isEqualTo(1L);
// PART 2.
log.info("Simulate normal operations for all servers and clients.");
clearClientRules(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime());
// PART 3.
log.info("Allow management server to detect partial seal and correct this issue.");
for (int i = 0; i < PARAMETERS.NUM_ITERATIONS_MODERATE; i++) {
Thread.sleep(PARAMETERS.TIMEOUT_SHORT.toMillis());
log.info("Assert successful seal of all servers.");
long server0Epoch = getServerRouter(SERVERS.PORT_0).getServerEpoch();
long server1Epoch = getServerRouter(SERVERS.PORT_1).getServerEpoch();
long server2Epoch = getServerRouter(SERVERS.PORT_2).getServerEpoch();
long server0LayoutEpoch = getLayoutServer(SERVERS.PORT_0).getCurrentLayout().getEpoch();
long server1LayoutEpoch = getLayoutServer(SERVERS.PORT_1).getCurrentLayout().getEpoch();
long server2LayoutEpoch = getLayoutServer(SERVERS.PORT_2).getCurrentLayout().getEpoch();
List<Long> epochs = Arrays.asList(
server0Epoch, server1Epoch, server2Epoch,
server0LayoutEpoch, server1LayoutEpoch, server2LayoutEpoch
);
if (epochs.stream().allMatch(epoch -> epoch == 2)) {
return;
}
log.warn("The seal is not complete yet. Wait for next iteration. Servers epochs: {}", epochs);
}
fail();
}
private void induceSequencerFailureAndWait() {
long currentEpoch = getCorfuRuntime().getLayoutView().getLayout().getEpoch();
// induce a failure to the server on PORT_0, where the current sequencer is active
//
getManagementServer(SERVERS.PORT_0).shutdown();
addServerRule(SERVERS.PORT_0, new TestRule().always().drop());
// wait for failover to install a new epoch (and a new layout)
waitForLayoutChange(layout ->
layout.getEpoch() > currentEpoch && !layout.getPrimarySequencer().equals(SERVERS.ENDPOINT_0),
getCorfuRuntime());
}
/**
* Scenario with 3 nodes: SERVERS.PORT_0, SERVERS.PORT_1 and SERVERS.PORT_2.
* We fail SERVERS.PORT_1 and then wait for one of the other two servers to
* handle this failure, propose a new layout and we assert on the epoch change.
* The failure is handled by ConserveFailureHandlerPolicy.
* No nodes are removed from the layout, but are marked unresponsive.
* A sequencer failover takes place where the next working sequencer is reset
* and made the primary.
*/
@Test
public void testSequencerFailover() throws Exception {
getManagementTestLayout();
final long beforeFailure = 5L;
final long afterFailure = 10L;
IStreamView sv = getCorfuRuntime().getStreamsView().get(CorfuRuntime.getStreamID("streamA"));
byte[] testPayload = "hello world".getBytes();
sv.append(testPayload);
sv.append(testPayload);
sv.append(testPayload);
sv.append(testPayload);
sv.append(testPayload);
assertThat(getSequencer(SERVERS.PORT_0).getGlobalLogTail()).isEqualTo(beforeFailure);
assertThat(getSequencer(SERVERS.PORT_1).getGlobalLogTail()).isEqualTo(0L);
induceSequencerFailureAndWait();
waitForLayoutChange(layout -> layout.getUnresponsiveServers().size() == 1
&& layout.getUnresponsiveServers().contains(SERVERS.ENDPOINT_0), getCorfuRuntime());
Layout newLayout = new Layout(getCorfuRuntime().getLayoutView().getLayout());
// Block until new sequencer reaches READY state.
TokenResponse tokenResponse = getCorfuRuntime().getSequencerView().query();
// verify that a failover sequencer was started with the correct starting-tail
assertThat(tokenResponse.getSequence()).isEqualTo(beforeFailure - 1);
sv.append(testPayload);
sv.append(testPayload);
sv.append(testPayload);
sv.append(testPayload);
sv.append(testPayload);
assertThat(newLayout.getUnresponsiveServers()).containsExactly(SERVERS.ENDPOINT_0);
tokenResponse = getCorfuRuntime().getSequencerView().query();
assertThat(tokenResponse.getSequence()).isEqualTo(afterFailure - 1);
}
protected <T extends ICorfuSMR> Object instantiateCorfuObject(TypeToken<T> tType, String name) {
return getCorfuRuntime().getObjectsView()
.build()
.setStreamName(name) // stream name
.setTypeToken(tType) // a TypeToken of the specified class
.open(); // instantiate the object!
}
protected ICorfuTable<Integer, String> getMap() {
ICorfuTable<Integer, String> testMap;
testMap = (ICorfuTable<Integer, String>) instantiateCorfuObject(
new TypeToken<CorfuTable<Integer, String>>() {
}, "test stream"
);
return testMap;
}
protected void TXBegin() {
getCorfuRuntime().getObjectsView().TXBegin();
}
protected void TXEnd() {
getCorfuRuntime().getObjectsView().TXEnd();
}
/**
* Check that transaction conflict resolution works properly in face of sequencer failover
*/
@Test
public void ckSequencerFailoverTXResolution() throws Exception {
// setup 3-Corfu node cluster
getManagementTestLayout();
Map<Integer, String> map = getMap();
// start a transaction and force it to obtain snapshot timestamp
// preceding the sequencer failover
t(0, () -> {
TXBegin();
map.get(0);
});
final String payload = "hello";
final int nUpdates = 5;
// in another thread, fill the log with a few entries
t(1, () -> {
for (int i = 0; i < nUpdates; i++)
map.put(i, payload);
});
// now, the tail of the log is at nUpdates;
// kill the sequencer, wait for a failover,
// and then resume the transaction above; it should abort
// (unnecessarily, but we are being conservative)
//
induceSequencerFailureAndWait();
t(0, () -> {
boolean commit = true;
map.put(nUpdates + 1, payload); // should not conflict
try {
TXEnd();
} catch (TransactionAbortedException ta) {
assertThat(ta.getAbortCause()).isEqualTo(AbortCause.NEW_SEQUENCER);
commit = false;
}
assertThat(commit)
.isFalse();
});
        // now, check that the same scenario, starting anew, can succeed
t(0, () -> {
TXBegin();
map.get(0);
});
// in another thread, fill the log with a few entries
t(1, () -> {
for (int i = 0; i < nUpdates; i++)
map.put(i, payload + 1);
});
t(0, () -> {
boolean commit = true;
map.put(nUpdates + 1, payload); // should not conflict
try {
TXEnd();
} catch (TransactionAbortedException ta) {
commit = false;
}
assertThat(commit)
.isTrue();
});
}
/**
* small variant on the above : don't start the first TX at the start of the log.
*/
@Test
public void ckSequencerFailoverTXResolution1() throws Exception {
getManagementTestLayout();
Map<Integer, String> map = getMap();
final String payload = "hello";
final int nUpdates = 5;
for (int i = 0; i < nUpdates; i++)
map.put(i, payload);
// start a transaction and force it to obtain snapshot timestamp
// preceding the sequencer failover
t(0, () -> {
TXBegin();
map.get(0);
});
// in another thread, fill the log with a few entries
t(1, () -> {
for (int i = 0; i < nUpdates; i++)
map.put(i, payload + 1);
});
// now, the tail of the log is at nUpdates;
// kill the sequencer, wait for a failover,
// and then resume the transaction above; it should abort
// (unnecessarily, but we are being conservative)
//
induceSequencerFailureAndWait();
t(0, () -> {
boolean commit = true;
map.put(nUpdates + 1, payload); // should not conflict
try {
TXEnd();
} catch (TransactionAbortedException ta) {
assertThat(ta.getAbortCause()).isEqualTo(AbortCause.NEW_SEQUENCER);
commit = false;
}
assertThat(commit)
.isFalse();
});
// now, check that the same scenario, starting anew, can succeed
t(0, () -> {
TXBegin();
map.get(0);
});
// in another thread, fill the log with a few entries
t(1, () -> {
for (int i = 0; i < nUpdates; i++)
map.put(i, payload + 2);
});
t(0, () -> {
boolean commit = true;
map.put(nUpdates + 1, payload); // should not conflict
try {
TXEnd();
} catch (TransactionAbortedException ta) {
commit = false;
}
assertThat(commit)
.isTrue();
});
}
/**
* When a stream is seen for the first time by the sequencer it returns a -1
* in the backpointer map.
* After failover, the new sequencer returns a null in the backpointer map
* forcing it to single step backwards and get the last backpointer for the
* given stream.
* An example is shown below:
* <p>
* Index : 0 1 2 3 | | 4 5 6 7 8
* Stream : A B A B | failover | A C A B B
* B.P : -1 -1 0 1 | | 2 -1 4 3 7
* <p>
* -1 : New StreamID so empty backpointers
* X : (null) Unknown backpointers as this is a failed-over sequencer.
* <p>
*/
@Test
public void sequencerFailoverBackpointerCheck() throws Exception {
getManagementTestLayout();
UUID streamA = UUID.nameUUIDFromBytes("stream A".getBytes());
UUID streamB = UUID.nameUUIDFromBytes("stream B".getBytes());
UUID streamC = UUID.nameUUIDFromBytes("stream C".getBytes());
final long streamA_backpointerRecovered = 2L;
final long streamB_backpointerRecovered = 3L;
final long streamA_backpointerFinal = 4L;
final long streamB_backpointerFinal = 7L;
getTokenWriteAndAssertBackPointer(streamA, Address.NON_EXIST);
getTokenWriteAndAssertBackPointer(streamB, Address.NON_EXIST);
getTokenWriteAndAssertBackPointer(streamA, 0L);
getTokenWriteAndAssertBackPointer(streamB, 1L);
induceSequencerFailureAndWait();
getTokenWriteAndAssertBackPointer(streamA, streamA_backpointerRecovered);
getTokenWriteAndAssertBackPointer(streamC, Address.NON_EXIST);
getTokenWriteAndAssertBackPointer(streamA, streamA_backpointerFinal);
getTokenWriteAndAssertBackPointer(streamB, streamB_backpointerRecovered);
getTokenWriteAndAssertBackPointer(streamB, streamB_backpointerFinal);
}
/**
* Requests for a token for the given stream ID.
* Asserts the backpointer map in the token response with the specified backpointer location.
* Writes test data to the log unit servers using the tokenResponse.
*
* @param streamID Stream ID to request token for.
* @param expectedBackpointerValue Expected backpointer for given stream.
*/
private void getTokenWriteAndAssertBackPointer(UUID streamID, Long expectedBackpointerValue) {
TokenResponse tokenResponse =
corfuRuntime.getSequencerView().next(streamID);
if (expectedBackpointerValue == null) {
assertThat(tokenResponse.getBackpointerMap()).isEmpty();
} else {
assertThat(tokenResponse.getBackpointerMap()).containsEntry(streamID, expectedBackpointerValue);
}
corfuRuntime.getAddressSpaceView().write(tokenResponse,
"test".getBytes());
}
/**
* Asserts that we cannot fetch a token after a failure but before sequencer reset.
*
* @throws Exception
*/
@Test
public void blockRecoverySequencerUntilReset() throws Exception {
final Semaphore resetDetected = new Semaphore(1);
Layout layout = getManagementTestLayout();
UUID streamA = UUID.nameUUIDFromBytes("stream A".getBytes());
getTokenWriteAndAssertBackPointer(streamA, Address.NON_EXIST);
resetDetected.acquire();
// Allow only SERVERS.PORT_0 to handle the failure.
// Preventing PORT_2 from bootstrapping the sequencer.
addClientRule(getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime(),
new TestRule().requestMatches(msg ->
msg.getPayload().getPayloadCase().equals(PayloadCase.BOOTSTRAP_SEQUENCER_REQUEST)).drop());
addClientRule(getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
new TestRule().requestMatches(msg -> {
if (msg.getPayload().getPayloadCase().equals(PayloadCase.BOOTSTRAP_SEQUENCER_REQUEST)) {
try {
// There is a failure but the BOOTSTRAP_SEQUENCER message has not yet been
// sent. So if we request a token now, we should be denied as the
// server is sealed and we get a WrongEpochException.
corfuRuntime.getLayoutView().getRuntimeLayout(layout)
.getSequencerClient(SERVERS.ENDPOINT_1)
.nextToken(Collections.singletonList(CorfuRuntime
.getStreamID("testStream")), 1).get();
fail();
} catch (InterruptedException | ExecutionException e) {
resetDetected.release();
}
}
return false;
}));
// Inducing failure on PORT_1
induceSequencerFailureAndWait();
assertThat(resetDetected
.tryAcquire(PARAMETERS.TIMEOUT_NORMAL.toMillis(), TimeUnit.MILLISECONDS))
.isTrue();
// We should be able to request a token now.
corfuRuntime.getSequencerView().next(CorfuRuntime.getStreamID("testStream"));
}
@Test
public void sealDoesNotModifyClientEpoch() throws Exception {
Layout l = getManagementTestLayout();
// Seal
Layout newLayout = new Layout(l);
newLayout.setEpoch(newLayout.getEpoch() + 1);
corfuRuntime.getLayoutView().getRuntimeLayout(newLayout).sealMinServerSet();
assertThat(corfuRuntime.getLayoutView().getLayout().getEpoch()).isEqualTo(l.getEpoch());
}
/**
     * Checks for updates to trailing layout servers.
* The layout is partially committed with epoch 2 except for ENDPOINT_0.
* All commit messages from the cluster are intercepted.
* The test checks whether at least one of the 3 management agents patches the layout server with the latest
* layout.
* If a commit message with any other epoch is sent, the test fails.
*/
@Test
public void updateTrailingLayoutServers() throws Exception {
Layout layout = new Layout(getManagementTestLayout());
AtomicBoolean commitWithDifferentEpoch = new AtomicBoolean(false);
final CountDownLatch latch = new CountDownLatch(1);
TestRule interceptCommit = new TestRule().requestMatches(msg -> {
if (msg.getPayload().getPayloadCase().equals(PayloadCase.COMMIT_LAYOUT_REQUEST)) {
if (msg.getPayload().getCommitLayoutRequest().getEpoch() == 2) {
latch.countDown();
} else {
commitWithDifferentEpoch.set(true);
}
}
return true;
});
addClientRule(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(), interceptCommit);
addClientRule(getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(), interceptCommit);
addClientRule(getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime(), interceptCommit);
final long highRank = 10L;
addClientRule(corfuRuntime, SERVERS.ENDPOINT_0, new TestRule().always().drop());
layout.setEpoch(2L);
corfuRuntime.getLayoutView().getRuntimeLayout(layout).sealMinServerSet();
// We increase to a higher rank to avoid being outranked. We could be outranked if the management
// agent attempts to fill in the epoch slot before we update.
corfuRuntime.getLayoutView().updateLayout(layout, highRank);
for (int i = 0; i < PARAMETERS.NUM_ITERATIONS_MODERATE; i++) {
Thread.sleep(PARAMETERS.TIMEOUT_SHORT.toMillis());
if (getLayoutServer(SERVERS.PORT_0).getCurrentLayout().equals(layout))
break;
}
assertThat(getLayoutServer(SERVERS.PORT_0).getCurrentLayout().getEpoch()).isEqualTo(2L);
assertThat(getLayoutServer(SERVERS.PORT_0).getCurrentLayout()).isEqualTo(layout);
latch.await();
assertThat(commitWithDifferentEpoch.get()).isFalse();
}
/**
* Tests a 3 node cluster.
* All Prepare messages are first blocked. Then a seal is issued for epoch 2.
* The test then ensures that no layout is committed for the epoch 2.
* We ensure that no layout is committed other than the Paxos path.
*/
@Test
public void blockLayoutUpdateAfterSeal() throws InterruptedException {
Layout layout = new Layout(getManagementTestLayout());
TestRule dropPrepareMsg = new TestRule()
.requestMatches(msg -> msg.getPayload().getPayloadCase().equals(PayloadCase.PREPARE_LAYOUT_REQUEST))
.drop();
// Block Paxos round by blocking all prepare methods.
addClientRule(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(), dropPrepareMsg);
addClientRule(getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(), dropPrepareMsg);
addClientRule(getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime(), dropPrepareMsg);
// Seal the layout.
layout.setEpoch(2L);
corfuRuntime.getLayoutView().getRuntimeLayout(layout).sealMinServerSet();
// Wait for the cluster to move the layout with epoch 2 without the Paxos round.
for (int i = 0; i < PARAMETERS.NUM_ITERATIONS_LOW; i++) {
TimeUnit.MILLISECONDS.sleep(PARAMETERS.TIMEOUT_VERY_SHORT.toMillis());
if (corfuRuntime.getLayoutView().getLayout().getEpoch() == 2L) {
fail();
}
corfuRuntime.invalidateLayout();
}
assertThat(corfuRuntime.getLayoutView().getLayout().getEpoch()).isEqualTo(1L);
}
/**
* A single node cluster is sealed but the client crashes before it can propose a layout.
     * The management server now has to detect this state and fill this slot with an existing
     * layout in order to unblock the data plane operations.
*
* @throws Exception
*/
@Test
public void unblockSealedCluster() throws Exception {
CorfuRuntime corfuRuntime = getDefaultRuntime();
Layout l = new Layout(corfuRuntime.getLayoutView().getLayout());
setAggressiveDetectorTimeouts(SERVERS.PORT_0);
waitForSequencerToBootstrap(SERVERS.PORT_0);
l.setEpoch(l.getEpoch() + 1);
corfuRuntime.getLayoutView().getRuntimeLayout(l).sealMinServerSet();
for (int i = 0; i < PARAMETERS.NUM_ITERATIONS_MODERATE; i++) {
Thread.sleep(PARAMETERS.TIMEOUT_SHORT.toMillis());
if (corfuRuntime.getLayoutView().getLayout().getEpoch() == l.getEpoch()) {
break;
}
corfuRuntime.invalidateLayout();
}
assertThat(corfuRuntime.getLayoutView().getLayout().getEpoch()).isEqualTo(l.getEpoch());
}
/**
* Tests healing of an unresponsive node now responding to pings.
* A rule is added on PORT_2 to drop all messages.
* The other 2 nodes PORT_0 and PORT_1 will detect this failure and mark it as unresponsive.
     * The rule is then removed, simulating a normally functioning PORT_2. The other nodes will now
     * be able to successfully ping PORT_2. They then remove the node PORT_2 from the unresponsive
     * servers list and mark it as active.
*
* @throws Exception
*/
@Test
public void testNodeHealing() {
CorfuRuntime corfuRuntime = null;
try {
addServer(SERVERS.PORT_0);
addServer(SERVERS.PORT_1);
addServer(SERVERS.PORT_2);
Layout l = new TestLayoutBuilder()
.setEpoch(1L)
.addLayoutServer(SERVERS.PORT_0)
.addLayoutServer(SERVERS.PORT_1)
.addLayoutServer(SERVERS.PORT_2)
.addSequencer(SERVERS.PORT_0)
.buildSegment()
.buildStripe()
.addLogUnit(SERVERS.PORT_0)
.addToSegment()
.addToLayout()
.build();
bootstrapAllServers(l);
corfuRuntime = getRuntime(l).connect();
setAggressiveTimeouts(l, corfuRuntime,
getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime());
setAggressiveDetectorTimeouts(SERVERS.PORT_0);
addServerRule(SERVERS.PORT_2, new TestRule().always().drop());
waitForLayoutChange(layout -> layout.getUnresponsiveServers()
.equals(Collections.singletonList(SERVERS.ENDPOINT_2)), corfuRuntime);
clearServerRules(SERVERS.PORT_2);
waitForLayoutChange(layout -> layout.getUnresponsiveServers().isEmpty()
&& layout.getSegments().size() == 1, corfuRuntime);
} finally {
if (corfuRuntime != null) {
corfuRuntime.shutdown();
}
}
}
/**
* Add a new node with layout, sequencer and log unit components.
* The new log unit node is open to reads and writes only in the new segment and no
* catchup or replication of old data is performed.
*
* @throws Exception
*/
@Test
public void testAddNodeWithoutCatchup() throws Exception {
CorfuRuntime rt = null;
try {
addServer(SERVERS.PORT_0);
Layout l1 = new TestLayoutBuilder()
.setEpoch(0L)
.addLayoutServer(SERVERS.PORT_0)
.addSequencer(SERVERS.PORT_0)
.buildSegment()
.buildStripe()
.addLogUnit(SERVERS.PORT_0)
.addToSegment()
.addToLayout()
.build();
bootstrapAllServers(l1);
ServerContext sc1 = new ServerContextBuilder()
.setSingle(false)
.setServerRouter(new TestServerRouter(SERVERS.PORT_1))
.setPort(SERVERS.PORT_1)
.build();
addServer(SERVERS.PORT_1, sc1);
rt = getNewRuntime(getDefaultNode())
.connect();
// Write to address space 0
rt.getStreamsView().get(CorfuRuntime.getStreamID("test"))
.append("testPayload".getBytes());
rt.getLayoutManagementView().addNode(l1, SERVERS.ENDPOINT_1,
true,
true,
true,
false,
0);
rt.invalidateLayout();
Layout layoutPhase2 = rt.getLayoutView().getLayout();
Layout l2 = new TestLayoutBuilder()
.setEpoch(1L)
.addLayoutServer(SERVERS.PORT_0)
.addLayoutServer(SERVERS.PORT_1)
.addSequencer(SERVERS.PORT_0)
.addSequencer(SERVERS.PORT_1)
.buildSegment()
.setStart(0L)
.setEnd(1L)
.buildStripe()
.addLogUnit(SERVERS.PORT_0)
.addToSegment()
.addToLayout()
.buildSegment()
.setStart(1L)
.setEnd(-1L)
.buildStripe()
.addLogUnit(SERVERS.PORT_0)
.addLogUnit(SERVERS.PORT_1)
.addToSegment()
.addToLayout()
.build();
assertThat(l2.asJSONString()).isEqualTo(layoutPhase2.asJSONString());
} finally {
if (rt != null) {
rt.shutdown();
}
}
}
/**
* This test starts with a cluster of 3 at epoch 1 with PORT_0 as the primary sequencer.
* Now runtime_1 writes 5 entries to streams A and B each.
* The token count has increased from 0-9.
*
* The cluster now reconfigures to mark PORT_1 as the primary sequencer for epoch 2.
* A stale client still observing epoch 1 requests 10 tokens from PORT_0.
* However 2 tokens are requested from PORT_1.
*
* The cluster again reconfigures to mark PORT_0 as the primary sequencer for epoch 3.
* The state of the new primary sequencer should now be recreated using the FastObjectLoader
* and should NOT reflect the 10 invalid tokens requested by the stale client.
*/
@Test
public void regressTokenCountToValidDispatchedTokens() throws Exception {
final UUID streamA = CorfuRuntime.getStreamID("streamA");
final UUID streamB = CorfuRuntime.getStreamID("streamB");
byte[] payload = "test_payload".getBytes();
Layout layout_1 = new Layout(getManagementTestLayout());
// In case any management agent is capable of detecting a failure (one node shutdown) before all
// three nodes go down, we will drop all messages to prevent reporting failures which could move
// the epoch, before the client actually moves it (leading to a wrongEpochException)
TestRule mngtAgentDropAll = new TestRule().always().drop();
addClientRule(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(),
mngtAgentDropAll);
addClientRule(getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
mngtAgentDropAll);
addClientRule(getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime(),
mngtAgentDropAll);
// Shut down management servers to prevent auto-reconfiguration.
getManagementServer(SERVERS.PORT_0).shutdown();
getManagementServer(SERVERS.PORT_1).shutdown();
getManagementServer(SERVERS.PORT_2).shutdown();
SequencerServer server0 = getSequencer(SERVERS.PORT_0);
SequencerServer server1 = getSequencer(SERVERS.PORT_1);
CorfuRuntime runtime_1 = getNewRuntime(getDefaultNode()).connect();
CorfuRuntime runtime_2 = getNewRuntime(getDefaultNode()).connect();
IStreamView streamViewA = runtime_1.getStreamsView().get(streamA);
IStreamView streamViewB = runtime_1.getStreamsView().get(streamB);
// Write 10 entries to the log using runtime_1.
// Stream A 0-4
streamViewA.append(payload);
streamViewA.append(payload);
streamViewA.append(payload);
streamViewA.append(payload);
streamViewA.append(payload);
// Stream B 5-9
streamViewB.append(payload);
streamViewB.append(payload);
streamViewB.append(payload);
streamViewB.append(payload);
streamViewB.append(payload);
// Add a rule to drop Seal and Paxos messages on PORT_0 (the current primary sequencer).
addClientRule(runtime_1, SERVERS.ENDPOINT_0, new TestRule().drop().always());
// Trigger reconfiguration and failover the sequencer to PORT_1
Layout layout_2 = new LayoutBuilder(layout_1)
.assignResponsiveSequencerAsPrimary(Collections.singleton(SERVERS.ENDPOINT_0))
.build();
layout_2.setEpoch(layout_2.getEpoch() + 1);
runtime_1.getLayoutView().getRuntimeLayout(layout_2).sealMinServerSet();
runtime_1.getLayoutView().updateLayout(layout_2, 1L);
runtime_1.getLayoutManagementView().reconfigureSequencerServers(layout_1, layout_2, false);
waitForLayoutChange(layout -> layout.getEpoch() == layout_2.getEpoch(), runtime_1);
clearClientRules(runtime_1);
// Using the stale client with view of epoch 1, request 10 tokens.
final int tokenCount = 5;
for (int x = 0; x < tokenCount; x++) {
runtime_2.getSequencerView().next(streamA);
runtime_2.getSequencerView().next(streamB);
}
// Using the new client request 2 tokens and write to the log.
streamViewA.append(payload);
streamViewA.append(payload);
final int expectedServer0Tokens = 20;
final int expectedServer1Tokens = 12;
assertThat(server0.getSequencerEpoch()).isEqualTo(layout_1.getEpoch());
assertThat(server1.getSequencerEpoch()).isEqualTo(layout_2.getEpoch());
assertThat(server0.getGlobalLogTail()).isEqualTo(expectedServer0Tokens);
assertThat(server1.getGlobalLogTail()).isEqualTo(expectedServer1Tokens);
// Trigger reconfiguration to failover back to PORT_0.
Layout layout_3 = new LayoutBuilder(layout_2)
.assignResponsiveSequencerAsPrimary(Collections.singleton(SERVERS.ENDPOINT_1))
.build();
layout_3.setEpoch(layout_3.getEpoch() + 1);
runtime_1.getLayoutView().getRuntimeLayout(layout_3).sealMinServerSet();
runtime_1.getLayoutView().updateLayout(layout_3, 1L);
runtime_1.getLayoutManagementView().reconfigureSequencerServers(layout_2, layout_3, false);
// Assert that the token count does not reflect the 10 tokens requested by the stale
// client on PORT_0.
assertThat(server0.getSequencerEpoch()).isEqualTo(layout_3.getEpoch());
assertThat(server1.getSequencerEpoch()).isEqualTo(layout_2.getEpoch());
assertThat(server0.getGlobalLogTail()).isEqualTo(expectedServer1Tokens);
assertThat(server1.getGlobalLogTail()).isEqualTo(expectedServer1Tokens);
// Assert that the streamTailMap has been reset and returns the correct backpointer.
final long expectedBackpointerStreamA = 11;
TokenResponse tokenResponse = runtime_1.getSequencerView().next(streamA);
assertThat(tokenResponse.getBackpointerMap().get(streamA))
.isEqualTo(expectedBackpointerStreamA);
}
/**
* Starts a cluster with 3 nodes.
* The epoch is then incremented and a layout proposed and accepted for the new epoch.
* This leaves the sequencer un-bootstrapped causing token requests to hang.
* The heartbeats should convey this primary sequencer NOT_READY state to the failure
* detector which bootstraps the sequencer.
*/
@Test
public void handleUnBootstrappedSequencer() throws Exception {
Layout layout = new Layout(getManagementTestLayout());
final long highRank = 10L;
// We increment the epoch and propose the same layout for the new epoch.
// Due to the router and sequencer epoch mismatch, the sequencer becomes NOT_READY.
// Note that this reconfiguration is not followed by the explicit sequencer bootstrap step.
layout.setEpoch(layout.getEpoch() + 1);
corfuRuntime.getLayoutView().getRuntimeLayout(layout).sealMinServerSet();
// We increase to a higher rank to avoid being outranked. We could be outranked if the management
// agent attempts to fill in the epoch slot before we update.
corfuRuntime.getLayoutView().updateLayout(layout, highRank);
// Assert that the primary sequencer is not ready.
assertThatThrownBy(() -> corfuRuntime.getLayoutView().getRuntimeLayout()
.getPrimarySequencerClient()
.requestMetrics().get()).hasCauseInstanceOf(ServerNotReadyException.class);
// Wait for the management service to detect and bootstrap the sequencer.
corfuRuntime.getSequencerView().query();
// Assert that the primary sequencer is bootstrapped.
assertThat(corfuRuntime.getLayoutView().getRuntimeLayout().getPrimarySequencerClient()
.requestMetrics().get().getSequencerStatus()).isEqualTo(SequencerStatus.READY);
}
/**
* Tests the Cluster Status Query API.
* The test starts with setting up a 3 node cluster:
* Layout Servers = PORT_0, PORT_1, PORT_2.
* Sequencer Servers = PORT_0, PORT_1, PORT_2.
* LogUnit Servers = PORT_0, PORT_1, PORT_2.
*
* STEP 1: First status query:
* All nodes up. Cluster status: STABLE.
*
* STEP 2: In this step the client is partitioned from the 2 nodes in the cluster.
* The cluster however is healthy. Status query:
* PORT_0 and PORT_1 are UNRESPONSIVE. Cluster status: STABLE
     * (Since the cluster is stable, it is just the client that cannot reach all healthy servers)
*
* A few entries are appended on a Stream. This data is written to PORT_0, PORT_1 and PORT_2.
*
* STEP 3: Client connections are restored from the previous step. PORT_0 is failed.
* This causes sequencer failover. The cluster is still reachable. Status query:
* PORT_0 is DOWN. Cluster status: DEGRADED.
*
* STEP 4: PORT_1 is failed. The cluster is non-operational now. Status query:
     * PORT_0 and PORT_1 UNRESPONSIVE, and the Cluster status: UNAVAILABLE, as the layout cannot
     * converge to a new layout (no consensus).
*
* STEP 5: All nodes are failed. The cluster is non-operational now. Status query:
* PORT_0, PORT_1 and PORT_2 DOWN/UNRESPONSIVE. Cluster status: UNAVAILABLE.
*
*/
@Test
public void queryClusterStatus() throws Exception {
get3NodeLayout();
getCorfuRuntime().getLayoutView().getLayout().getAllServers().forEach(endpoint ->
getCorfuRuntime().getRouter(endpoint)
.setTimeoutResponse(PARAMETERS.TIMEOUT_VERY_SHORT.toMillis()));
// STEP 1.
ClusterStatusReport clusterStatus = getCorfuRuntime().getManagementView().getClusterStatus();
Map<String, ConnectivityStatus> nodeConnectivityMap = clusterStatus.getClientServerConnectivityStatusMap();
Map<String, NodeStatus> nodeStatusMap = clusterStatus.getClusterNodeStatusMap();
ClusterStatusReport.ClusterStatusReliability clusterStatusReliability = clusterStatus.getClusterStatusReliability();
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_0)).isEqualTo(ConnectivityStatus.RESPONSIVE);
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_1)).isEqualTo(ConnectivityStatus.RESPONSIVE);
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_2)).isEqualTo(ConnectivityStatus.RESPONSIVE);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_0)).isEqualTo(NodeStatus.UP);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_1)).isEqualTo(NodeStatus.UP);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_2)).isEqualTo(NodeStatus.UP);
assertThat(clusterStatusReliability).isEqualTo(ClusterStatusReport.ClusterStatusReliability.STRONG_QUORUM);
assertThat(clusterStatus.getClusterStatus()).isEqualTo(ClusterStatus.STABLE);
// STEP 2.
// Because we are explicitly dropping PING messages only (which are used to verify
// client's connectivity) on ENDPOINT_0 and ENDPOINT_1, this test will show both nodes
        // unresponsive, despite their actual node status being UP.
TestRule rule = new TestRule()
.requestMatches(msg -> msg.getPayload().getPayloadCase().equals(PayloadCase.PING_REQUEST))
.drop();
addClientRule(getCorfuRuntime(), SERVERS.ENDPOINT_0, rule);
addClientRule(getCorfuRuntime(), SERVERS.ENDPOINT_1, rule);
clusterStatus = getCorfuRuntime().getManagementView().getClusterStatus();
nodeConnectivityMap = clusterStatus.getClientServerConnectivityStatusMap();
nodeStatusMap = clusterStatus.getClusterNodeStatusMap();
clusterStatusReliability = clusterStatus.getClusterStatusReliability();
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_0)).isEqualTo(ConnectivityStatus.UNRESPONSIVE);
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_1)).isEqualTo(ConnectivityStatus.UNRESPONSIVE);
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_2)).isEqualTo(ConnectivityStatus.RESPONSIVE);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_0)).isEqualTo(NodeStatus.UP);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_1)).isEqualTo(NodeStatus.UP);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_2)).isEqualTo(NodeStatus.UP);
assertThat(clusterStatusReliability).isEqualTo(ClusterStatusReport.ClusterStatusReliability.STRONG_QUORUM);
assertThat(clusterStatus.getClusterStatus()).isEqualTo(ClusterStatus.STABLE);
// Write 10 entries. 0-9.
IStreamView streamView = getCorfuRuntime().getStreamsView()
.get(CorfuRuntime.getStreamID("testStream"));
final int entriesCount = 10;
final byte[] payload = "payload".getBytes();
for (int i = 0; i < entriesCount; i++) {
streamView.append(payload);
}
// STEP 3.
clearClientRules(getCorfuRuntime());
addServerRule(SERVERS.PORT_0, new TestRule().drop().always());
waitForLayoutChange(layout -> layout.getUnresponsiveServers().size() == 1
&& layout.getUnresponsiveServers().contains(SERVERS.ENDPOINT_0)
&& layout.getSegments().size() == 1,
getCorfuRuntime());
clusterStatus = getCorfuRuntime().getManagementView().getClusterStatus();
nodeConnectivityMap = clusterStatus.getClientServerConnectivityStatusMap();
nodeStatusMap = clusterStatus.getClusterNodeStatusMap();
clusterStatusReliability = clusterStatus.getClusterStatusReliability();
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_0)).isEqualTo(ConnectivityStatus.UNRESPONSIVE);
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_1)).isEqualTo(ConnectivityStatus.RESPONSIVE);
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_2)).isEqualTo(ConnectivityStatus.RESPONSIVE);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_0)).isEqualTo(NodeStatus.DOWN);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_1)).isEqualTo(NodeStatus.UP);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_2)).isEqualTo(NodeStatus.UP);
assertThat(clusterStatusReliability).isEqualTo(ClusterStatusReport.ClusterStatusReliability.STRONG_QUORUM);
assertThat(clusterStatus.getClusterStatus()).isEqualTo(ClusterStatus.DEGRADED);
// STEP 4.
        // Since there will be no epoch change while the majority of servers are down, we cannot obtain
        // a reliable state of the cluster and report it as UNAVAILABLE; connectivity from the client to
        // each node in the cluster is still reported.
Semaphore latch1 = new Semaphore(1);
latch1.acquire();
addClientRule(getManagementServer(SERVERS.PORT_2).getManagementAgent().getCorfuRuntime(),
new TestRule().requestMatches(msg -> {
if (msg.getPayload().getPayloadCase().equals(PayloadCase.REPORT_FAILURE_REQUEST)) {
latch1.release();
}
return false;
}).drop());
addServerRule(SERVERS.PORT_1, new TestRule().drop().always());
addClientRule(getManagementServer(SERVERS.PORT_1).getManagementAgent().getCorfuRuntime(),
new TestRule().always().drop());
assertThat(latch1.tryAcquire(PARAMETERS.TIMEOUT_LONG.toMillis(), TimeUnit.MILLISECONDS))
.isTrue();
clusterStatus = getCorfuRuntime().getManagementView().getClusterStatus();
nodeConnectivityMap = clusterStatus.getClientServerConnectivityStatusMap();
nodeStatusMap = clusterStatus.getClusterNodeStatusMap();
clusterStatusReliability = clusterStatus.getClusterStatusReliability();
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_0)).isEqualTo(ConnectivityStatus.UNRESPONSIVE);
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_1)).isEqualTo(ConnectivityStatus.UNRESPONSIVE);
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_2)).isEqualTo(ConnectivityStatus.RESPONSIVE);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_0)).isEqualTo(NodeStatus.NA);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_1)).isEqualTo(NodeStatus.NA);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_2)).isEqualTo(NodeStatus.NA);
assertThat(clusterStatusReliability).isEqualTo(ClusterStatusReport.ClusterStatusReliability.WEAK_NO_QUORUM);
assertThat(clusterStatus.getClusterStatus()).isEqualTo(ClusterStatus.UNAVAILABLE);
// STEP 5.
clearClientRules(getCorfuRuntime());
addServerRule(SERVERS.PORT_2, new TestRule().drop().always());
clusterStatus = getCorfuRuntime().getManagementView().getClusterStatus();
nodeConnectivityMap = clusterStatus.getClientServerConnectivityStatusMap();
nodeStatusMap = clusterStatus.getClusterNodeStatusMap();
clusterStatusReliability = clusterStatus.getClusterStatusReliability();
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_0)).isEqualTo(ConnectivityStatus.UNRESPONSIVE);
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_1)).isEqualTo(ConnectivityStatus.UNRESPONSIVE);
assertThat(nodeConnectivityMap.get(SERVERS.ENDPOINT_2)).isEqualTo(ConnectivityStatus.UNRESPONSIVE);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_0)).isEqualTo(NodeStatus.NA);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_1)).isEqualTo(NodeStatus.NA);
assertThat(nodeStatusMap.get(SERVERS.ENDPOINT_2)).isEqualTo(NodeStatus.NA);
assertThat(clusterStatusReliability).isEqualTo(ClusterStatusReport.ClusterStatusReliability.UNAVAILABLE);
assertThat(clusterStatus.getClusterStatus()).isEqualTo(ClusterStatus.UNAVAILABLE);
}
/**
* Tests that if the cluster gets stuck in a live-lock the systemDownHandler is invoked.
* Scenario: Cluster of 2 nodes - Nodes 0 and 1
* Some data (10 appends) is written into the cluster.
* Then rules are added on both the nodes' management agents so that they cannot reconfigure
* the system. Another rule is added to the tail of the chain to drop all READ_RESPONSES.
* The epoch is incremented and the new layout is pushed to both the nodes.
* NOTE: The sequencer is not bootstrapped for the new epoch.
* Now, both the management agents attempt to bootstrap the new sequencer but the
* FastObjectLoaders should stall due to the READ_RESPONSE drop rule.
* This triggers the systemDownHandler.
*/
@Test
public void triggerSystemDownHandlerInDeadlock() throws Exception {
// Cluster Setup.
addServer(SERVERS.PORT_0);
addServer(SERVERS.PORT_1);
Layout layout = new TestLayoutBuilder()
.setEpoch(1L)
.addLayoutServer(SERVERS.PORT_0)
.addLayoutServer(SERVERS.PORT_1)
.addSequencer(SERVERS.PORT_0)
.addSequencer(SERVERS.PORT_1)
.buildSegment()
.buildStripe()
.addLogUnit(SERVERS.PORT_0)
.addLogUnit(SERVERS.PORT_1)
.addToSegment()
.addToLayout()
.setClusterId(UUID.randomUUID())
.build();
bootstrapAllServers(layout);
corfuRuntime = getRuntime(layout).connect();
CorfuRuntime managementRuntime0 = getManagementServer(SERVERS.PORT_0)
.getManagementAgent().getCorfuRuntime();
CorfuRuntime managementRuntime1 = getManagementServer(SERVERS.PORT_1)
.getManagementAgent().getCorfuRuntime();
waitForSequencerToBootstrap(SERVERS.PORT_0);
// Setting aggressive timeouts
setAggressiveTimeouts(layout, corfuRuntime, managementRuntime0, managementRuntime1);
setAggressiveDetectorTimeouts(SERVERS.PORT_0, SERVERS.PORT_1);
// Append data.
IStreamView streamView = corfuRuntime.getStreamsView()
.get(CorfuRuntime.getStreamID("testStream"));
final byte[] payload = "test".getBytes();
final int num = 10;
for (int i = 0; i < num; i++) {
streamView.append(payload);
}
// Register custom systemDownHandler to detect live-lock.
final Semaphore semaphore = new Semaphore(2);
semaphore.acquire(2);
final int sysDownTriggerLimit = 3;
managementRuntime0.getParameters().setSystemDownHandlerTriggerLimit(sysDownTriggerLimit);
managementRuntime1.getParameters().setSystemDownHandlerTriggerLimit(sysDownTriggerLimit);
TestRule testRule = new TestRule()
.requestMatches(msg -> msg.getPayload().getPayloadCase().equals(PayloadCase.SEAL_REQUEST))
.drop();
addClientRule(managementRuntime0, testRule);
addClientRule(managementRuntime1, testRule);
// Since the fast loader will retrieve the tails from the head node,
// we need to drop all tail requests to hang the FastObjectLoaders
addServerRule(SERVERS.PORT_0, new TestRule().responseMatches(m -> {
if (m.getPayload().getPayloadCase().equals(ResponsePayloadMsg.PayloadCase.LOG_ADDRESS_SPACE_RESPONSE)) {
semaphore.release();
return true;
}
return false;
}).drop());
// Trigger an epoch change to trigger FastObjectLoader to run for sequencer bootstrap.
Layout layout1 = new Layout(layout);
layout1.setEpoch(layout1.getEpoch() + 1);
corfuRuntime.getLayoutView().getRuntimeLayout(layout1).sealMinServerSet();
corfuRuntime.getLayoutView().updateLayout(layout1, 1L);
assertThat(semaphore
.tryAcquire(2, PARAMETERS.TIMEOUT_LONG.toMillis(), TimeUnit.MILLISECONDS))
.isTrue();
// Create a fault - Epoch instability by just sealing the cluster but not filling the
// layout slot.
corfuRuntime.invalidateLayout();
Layout layout2 = new Layout(corfuRuntime.getLayoutView().getLayout());
layout2.setEpoch(layout2.getEpoch() + 1);
corfuRuntime.getLayoutView().getRuntimeLayout(layout2).sealMinServerSet();
clearClientRules(managementRuntime0);
clearClientRules(managementRuntime1);
for (int i = 0; i < PARAMETERS.NUM_ITERATIONS_MODERATE; i++) {
if (corfuRuntime.getLayoutView().getLayout().getEpoch() == layout2.getEpoch()) {
break;
}
corfuRuntime.invalidateLayout();
TimeUnit.MILLISECONDS.sleep(PARAMETERS.TIMEOUT_SHORT.toMillis());
}
// Assert that the DetectionWorker threads are freed from the deadlock and are able to fill
// up the layout slot and stabilize the cluster.
assertThat(corfuRuntime.getLayoutView().getLayout().getEpoch())
.isEqualTo(layout2.getEpoch());
clearServerRules(SERVERS.PORT_0);
// Once the rules are cleared, the detectors should resolve the epoch instability,
// bootstrap the sequencer and fetch a new token.
assertThat(corfuRuntime.getSequencerView().query()).isNotNull();
}
/**
* Tests the triggerSequencerReconfiguration method. The READ_RESPONSE messages are blocked by
* adding a rule to drop these. The reconfiguration task unblocks with the help of the
* systemDownHandler.
*/
@Test
public void unblockSequencerRecoveryOnDeadlock() throws Exception {
CorfuRuntime corfuRuntime = getDefaultRuntime();
final Layout layout = corfuRuntime.getLayoutView().getLayout();
// Setting aggressive timeouts
setAggressiveTimeouts(layout, corfuRuntime,
getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime());
setAggressiveDetectorTimeouts(SERVERS.PORT_0);
final int sysDownTriggerLimit = 3;
getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime().getParameters()
.setSystemDownHandlerTriggerLimit(sysDownTriggerLimit);
// Add rule to drop all read responses to hang the FastObjectLoaders.
addServerRule(SERVERS.PORT_0, new TestRule().responseMatches(m -> m.getPayload().getPayloadCase()
.equals(ResponsePayloadMsg.PayloadCase.READ_LOG_RESPONSE)).drop());
getManagementServer(SERVERS.PORT_0).getManagementAgent()
.getCorfuRuntime().getLayoutManagementView()
.asyncSequencerBootstrap(layout,
getManagementServer(SERVERS.PORT_0).getManagementAgent()
.getRemoteMonitoringService().getFailureDetectorWorker())
.get();
}
/**
* Tests that a degraded cluster heals a sealed cluster.
* NOTE: A sealed cluster without a layout causes the system to halt as none of the clients can
* perform data operations until the new epoch is filled in with a layout.
* Scenario: 3 nodes - PORT_0, PORT_1 and PORT_2.
* A server rule is added to simulate PORT_2 as unresponsive.
* First, the degraded cluster moves from epoch 1 to epoch 2 to mark PORT_2 unresponsive.
* Now, PORT_0 and PORT_1 are sealed to epoch 3.
     * The fault detectors detect this and fill epoch 3 with a layout.
*/
@Test
public void testSealedDegradedClusterHealing() throws Exception {
get3NodeLayout();
CorfuRuntime corfuRuntime = getDefaultRuntime();
addServerRule(SERVERS.PORT_2, new TestRule().always().drop());
for (int i = 0; i < PARAMETERS.NUM_ITERATIONS_MODERATE; i++) {
corfuRuntime.invalidateLayout();
if (!corfuRuntime.getLayoutView().getLayout().getUnresponsiveServers().isEmpty()) {
break;
}
TimeUnit.MILLISECONDS.sleep(PARAMETERS.TIMEOUT_VERY_SHORT.toMillis());
}
Layout layout = new Layout(corfuRuntime.getLayoutView().getLayout());
layout.setEpoch(layout.getEpoch() + 1);
corfuRuntime.getLayoutView().getRuntimeLayout(layout).sealMinServerSet();
for (int i = 0; i < PARAMETERS.NUM_ITERATIONS_MODERATE; i++) {
corfuRuntime.invalidateLayout();
if (corfuRuntime.getLayoutView().getLayout().getEpoch() == layout.getEpoch()) {
break;
}
TimeUnit.MILLISECONDS.sleep(PARAMETERS.TIMEOUT_VERY_SHORT.toMillis());
}
assertThat(corfuRuntime.getLayoutView().getLayout()).isEqualTo(layout);
}
/**
* Write a random entry to the specified CorfuTable.
*
* @param table CorfuTable to populate.
*/
private void writeRandomEntryToTable(CorfuTable table) {
Random r = new Random();
corfuRuntime.getObjectsView().TXBegin();
table.put(r.nextInt(), r.nextInt());
corfuRuntime.getObjectsView().TXEnd();
}
/**
* Test scenario where the sequencer bootstrap triggers cache cleanup causing maxConflictWildcard to be reset.
     * The runtime requests 2 tokens but persists only 1 log entry. On an epoch change, the failover sequencer
* (in this case, itself) is bootstrapped by running the fastObjectLoader.
* This bootstrap sets the token to 1 and maxConflictWildcard to 0. This test asserts that the maxConflictWildcard
* stays 0 even after the cache eviction and does not abort transactions with SEQUENCER_OVERFLOW cause.
*/
@Test
public void testSequencerCacheOverflowOnFailover() throws Exception {
corfuRuntime = getDefaultRuntime();
CorfuTable<String, String> table = corfuRuntime.getObjectsView().build()
.setTypeToken(new TypeToken<CorfuTable<String, String>>() {
})
.setStreamName("test")
.open();
writeRandomEntryToTable(table);
// Block the writes so that we only fetch a sequencer token but not persist the entry on the LogUnit.
addClientRule(corfuRuntime, new TestRule().requestMatches(m ->
m.getPayload().getPayloadCase().equals(PayloadCase.WRITE_LOG_REQUEST)).drop());
CompletableFuture<Boolean> future = CompletableFuture.supplyAsync(() -> {
writeRandomEntryToTable(table);
return true;
});
// Block any sequencer bootstrap attempts.
addClientRule(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime(), new TestRule()
.requestMatches(msg ->
msg.getPayload().getPayloadCase().equals(PayloadCase.BOOTSTRAP_SEQUENCER_REQUEST)).drop());
// Increment the sequencer epoch twice so that a full sequencer bootstrap is required.
incrementClusterEpoch(corfuRuntime);
Layout layout = incrementClusterEpoch(corfuRuntime);
// Clear rules to now allow sequencer bootstrap.
clearClientRules(getManagementServer(SERVERS.PORT_0).getManagementAgent().getCorfuRuntime());
while (getSequencer(SERVERS.PORT_0).getSequencerEpoch() != layout.getEpoch()) {
TimeUnit.MILLISECONDS.sleep(PARAMETERS.TIMEOUT_VERY_SHORT.toMillis());
}
clearClientRules(corfuRuntime);
// Attempt data operation.
        // The data operation should fail if the maxConflictWildcard is updated on cache invalidation, causing
        // the value to change.
writeRandomEntryToTable(table);
future.cancel(true);
}
/**
* Tests the partial bootstrap scenario while adding a new node.
* Starts with cluster of 1 node: PORT_0.
* We then bootstrap the layout server of PORT_1 with a layout.
* Then the bootstrapNewNode is triggered. This should detect the same layout and complete the bootstrap.
*/
@Test
public void testPartialBootstrapNodeSuccess() throws ExecutionException, InterruptedException {
corfuRuntime = getDefaultRuntime();
addServer(SERVERS.PORT_1);
Layout layout = corfuRuntime.getLayoutView().getLayout();
// Bootstrap the layout server.
corfuRuntime.getLayoutView().getRuntimeLayout().getLayoutClient(SERVERS.ENDPOINT_1).bootstrapLayout(layout);
// Attempt bootstrapping the node. The node should attempt bootstrapping both the components Layout Server and
// Management Server.
assertThat(corfuRuntime.getLayoutManagementView().bootstrapNewNode(SERVERS.ENDPOINT_1).get()).isTrue();
}
/**
* Tests the partial bootstrap scenario while adding a new node.
* Starts with cluster of 1 node: PORT_0.
* We then bootstrap the layout server of PORT_1 with a layout.
* A rule is added to prevent the management server from being bootstrapped. Then the bootstrapNewNode is
     * triggered. This should pass, since the bootstrap new node function is part of an add node workflow that
     * requires a management server to be bootstrapped.
*/
@Test
public void testPartialBootstrapNodeFailure() {
corfuRuntime = getDefaultRuntime();
addServer(SERVERS.PORT_1);
Layout layout = corfuRuntime.getLayoutView().getLayout();
// Bootstrap the layout server.
corfuRuntime.getLayoutView().getRuntimeLayout().getLayoutClient(SERVERS.ENDPOINT_1).bootstrapLayout(layout);
addClientRule(corfuRuntime, new TestRule().requestMatches(msg ->
msg.getPayload().getPayloadCase().equals(PayloadCase.BOOTSTRAP_MANAGEMENT_REQUEST)).drop());
assertThat(corfuRuntime.getLayoutManagementView().bootstrapNewNode(SERVERS.ENDPOINT_1).join()).isTrue();
}
}
| 32,135 |
1,027 | // Copyright (c) 2015 GitHub, Inc.
// Use of this source code is governed by the MIT license that can be
// found in the LICENSE file.
#ifndef ATOM_BROWSER_ATOM_DOWNLOAD_MANAGER_DELEGATE_H_
#define ATOM_BROWSER_ATOM_DOWNLOAD_MANAGER_DELEGATE_H_
#include <memory>
#include <string>
#include <vector>
#include "atom/browser/api/atom_api_download_item.h"
#include "base/memory/weak_ptr.h"
#include "base/strings/utf_string_conversions.h"
#include "base/task_scheduler/post_task.h"
#include "chrome/browser/download/download_path_reservation_tracker.h"
#include "chrome/browser/download/download_target_determiner.h"
#include "chrome/browser/safe_browsing/download_protection/download_protection_service.h"
#include "content/public/browser/download_manager_delegate.h"
class DownloadPrefs;
class Profile;
namespace content {
class DownloadManager;
}
namespace atom {
class AtomDownloadManagerDelegate : public content::DownloadManagerDelegate,
public DownloadTargetDeterminerDelegate {
public:
explicit AtomDownloadManagerDelegate(content::DownloadManager* manager);
virtual ~AtomDownloadManagerDelegate();
bool GenerateFileHash() override;
void OnDownloadTargetDetermined(
int32_t download_id,
const content::DownloadTargetCallback& callback,
std::unique_ptr<DownloadTargetInfo> target_info);
// content::DownloadManagerDelegate:
void Shutdown() override;
bool DetermineDownloadTarget(
download::DownloadItem* download,
const content::DownloadTargetCallback& callback) override;
bool ShouldOpenDownload(
download::DownloadItem* download,
const content::DownloadOpenDelayedCallback& callback) override;
void GetNextId(const content::DownloadIdCallback& callback) override;
bool ShouldCompleteDownload(download::DownloadItem* item,
const base::Closure& complete_callback) override;
protected:
virtual safe_browsing::DownloadProtectionService*
GetDownloadProtectionService();
void CheckDownloadUrl(download::DownloadItem* download,
const base::FilePath& suggested_virtual_path,
const CheckDownloadUrlCallback& callback) override;
void NotifyExtensions(download::DownloadItem* download,
const base::FilePath& suggested_virtual_path,
const NotifyExtensionsCallback& callback) override;
void ReserveVirtualPath(
download::DownloadItem* download,
const base::FilePath& virtual_path,
bool create_directory,
DownloadPathReservationTracker::FilenameConflictAction conflict_action,
const ReservedPathCallback& callback) override;
void RequestConfirmation(download::DownloadItem* download,
const base::FilePath& suggested_virtual_path,
DownloadConfirmationReason reason,
const ConfirmationCallback& callback) override;
void DetermineLocalPath(download::DownloadItem* download,
const base::FilePath& virtual_path,
const LocalPathCallback& callback) override;
void GetFileMimeType(const base::FilePath& path,
const GetFileMimeTypeCallback& callback) override;
private:
bool IsDownloadReadyForCompletion(
download::DownloadItem* item,
const base::Closure& internal_complete_callback);
void GetItemSavePath(download::DownloadItem* item, base::FilePath* path);
bool GetExtension(download::DownloadItem* item,
const base::FilePath& target_path,
base::FilePath::StringType* extension);
void OnDownloadItemSelected(const content::DownloadTargetCallback& callback,
std::unique_ptr<DownloadTargetInfo> target_info,
api::DownloadItem* download_item,
const std::vector<base::FilePath>& paths);
void OnDownloadItemSelectionCancelled(
const content::DownloadTargetCallback& callback,
download::DownloadItem* item);
void CheckClientDownloadDone(uint32_t download_id,
safe_browsing::DownloadCheckResult result);
void ShouldCompleteDownloadInternal(
uint32_t download_id,
const base::Closure& user_complete_callback);
content::DownloadManager* download_manager_;
std::unique_ptr<DownloadPrefs> download_prefs_;
base::WeakPtrFactory<AtomDownloadManagerDelegate> weak_ptr_factory_;
DISALLOW_COPY_AND_ASSIGN(AtomDownloadManagerDelegate);
};
} // namespace atom
#endif // ATOM_BROWSER_ATOM_DOWNLOAD_MANAGER_DELEGATE_H_
| 1,709 |
2,015 | <reponame>theRealBaccata/picotorrent
#pragma once
#include <stdint.h>
namespace pt
{
namespace BitTorrent
{
struct TorrentStatistics
{
bool isDownloadingAny;
int64_t totalPayloadDownloadRate;
int64_t totalPayloadUploadRate;
int64_t totalWanted;
int64_t totalWantedDone;
};
}
}
| 149 |
1,682 | <reponame>haroldl/rest.li
/*
Copyright (c) 2012 LinkedIn Corp.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.linkedin.restli.server;
import com.linkedin.data.schema.RecordDataSchema;
import com.linkedin.data.template.DataTemplateUtil;
import com.linkedin.data.template.RecordTemplate;
/**
 * This is a marker class and is not meant to be instantiated. Use it as the metadata type in CollectResult to indicate that no metadata is provided.
* See example in the integration test
*
* @author <NAME>
*/
public final class NoMetadata extends RecordTemplate
{
private final static RecordDataSchema SCHEMA = (RecordDataSchema) DataTemplateUtil.parseSchema("{\"type\":\"record\",\"name\":\"NoMetadata\",\"namespace\":\"com.linkedin.restli.server\",\"doc\":\"This is marker class, and not meant to be instantiated. Use as the metadata type in CollectResult to indicate that no metadata is provided.\",\"fields\":[]}");
private NoMetadata()
{
super(null, null);
}
}
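// A minimal usage sketch (illustrative only, not part of this file): a resource method returning a
// collection with no extra metadata can use NoMetadata as the metadata type parameter of its
// collection container. The container shown here (CollectionResult), the Greeting element type and
// the fetchGreetings() helper are assumptions, not taken from this source.
//
//   public CollectionResult<Greeting, NoMetadata> findAll(PagingContext paging)
//   {
//     return new CollectionResult<>(fetchGreetings(paging), null);
//   }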
| 421 |
812 | <filename>server/src/main/java/password/pwm/svc/intruder/IntruderServiceClient.java
/*
* Password Management Servlets (PWM)
* http://www.pwm-project.org
*
* Copyright (c) 2006-2009 Novell, Inc.
* Copyright (c) 2009-2021 The PWM Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package password.pwm.svc.intruder;
import password.pwm.PwmDomain;
import password.pwm.bean.SessionLabel;
import password.pwm.bean.UserIdentity;
import password.pwm.config.PwmSetting;
import password.pwm.config.value.data.FormConfiguration;
import password.pwm.error.PwmError;
import password.pwm.error.PwmUnrecoverableException;
import password.pwm.http.PwmRequest;
import password.pwm.http.PwmSession;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class IntruderServiceClient
{
private IntruderServiceClient()
{
}
public static void checkAddressAndSession( final PwmDomain pwmDomain, final PwmSession pwmSession )
throws PwmUnrecoverableException
{
final IntruderDomainService intruderService = pwmDomain.getIntruderService();
if ( pwmSession != null )
{
final String subject = pwmSession.getSessionStateBean().getSrcAddress();
intruderService.check( IntruderRecordType.ADDRESS, subject );
final int maxAllowedAttempts = ( int ) pwmDomain.getConfig().readSettingAsLong( PwmSetting.INTRUDER_SESSION_MAX_ATTEMPTS );
if ( maxAllowedAttempts != 0 && pwmSession.getSessionStateBean().getIntruderAttempts().get() > maxAllowedAttempts )
{
throw new PwmUnrecoverableException( PwmError.ERROR_INTRUDER_SESSION );
}
}
}
public static void markAddressAndSession( final PwmDomain pwmDomain, final PwmSession pwmSession )
throws PwmUnrecoverableException
{
final IntruderDomainService intruderService = pwmDomain.getIntruderService();
if ( pwmSession != null )
{
final String subject = pwmSession.getSessionStateBean().getSrcAddress();
pwmSession.getSessionStateBean().incrementIntruderAttempts();
intruderService.mark( IntruderRecordType.ADDRESS, subject, pwmSession.getLabel() );
}
}
public static void clearAddressAndSession( final PwmDomain pwmDomain, final PwmSession pwmSession )
throws PwmUnrecoverableException
{
final IntruderDomainService intruderService = pwmDomain.getIntruderService();
if ( pwmSession != null )
{
final String subject = pwmSession.getSessionStateBean().getSrcAddress();
intruderService.clear( IntruderRecordType.ADDRESS, subject );
pwmSession.getSessionStateBean().clearIntruderAttempts();
pwmSession.getSessionStateBean().setSessionIdRecycleNeeded( true );
}
}
public static void checkUserIdentity( final PwmDomain pwmDomain, final UserIdentity userIdentity )
throws PwmUnrecoverableException
{
final IntruderDomainService intruderService = pwmDomain.getIntruderService();
if ( userIdentity != null )
{
final String subject = userIdentity.toDelimitedKey();
intruderService.check( IntruderRecordType.USER_ID, subject );
}
}
public static void markUserIdentity( final PwmRequest pwmRequest, final UserIdentity userIdentity )
throws PwmUnrecoverableException
{
markUserIdentity( pwmRequest.getPwmDomain(), pwmRequest.getLabel(), userIdentity );
}
public static void markUserIdentity( final PwmDomain pwmDomain, final SessionLabel sessionLabel, final UserIdentity userIdentity )
throws PwmUnrecoverableException
{
final IntruderDomainService intruderService = pwmDomain.getIntruderService();
if ( userIdentity != null )
{
final String subject = userIdentity.toDelimitedKey();
intruderService.mark( IntruderRecordType.USER_ID, subject, sessionLabel );
}
}
public static void clearUserIdentity( final PwmRequest pwmRequest, final UserIdentity userIdentity )
throws PwmUnrecoverableException
{
final IntruderDomainService intruderService = pwmRequest.getPwmDomain().getIntruderService();
if ( userIdentity != null )
{
final String subject = userIdentity.toDelimitedKey();
intruderService.clear( IntruderRecordType.USER_ID, subject );
}
}
public static void markAttributes( final PwmRequest pwmRequest, final Map<FormConfiguration, String> formValues )
throws PwmUnrecoverableException
{
markAttributes( pwmRequest.getPwmDomain(), formValues, pwmRequest.getLabel() );
}
public static void markAttributes( final PwmDomain pwmDomain, final Map<FormConfiguration, String> formValues, final SessionLabel sessionLabel )
throws PwmUnrecoverableException
{
final IntruderDomainService intruderService = pwmDomain.getIntruderService();
final List<String> subjects = attributeFormToList( formValues );
for ( final String subject : subjects )
{
intruderService.mark( IntruderRecordType.ATTRIBUTE, subject, sessionLabel );
}
}
public static void clearAttributes( final PwmDomain pwmDomain, final Map<FormConfiguration, String> formValues )
throws PwmUnrecoverableException
{
final IntruderDomainService intruderService = pwmDomain.getIntruderService();
final List<String> subjects = attributeFormToList( formValues );
for ( final String subject : subjects )
{
intruderService.clear( IntruderRecordType.ATTRIBUTE, subject );
}
}
public static void checkAttributes( final PwmDomain pwmDomain, final Map<FormConfiguration, String> formValues )
throws PwmUnrecoverableException
{
final IntruderDomainService intruderService = pwmDomain.getIntruderService();
final List<String> subjects = attributeFormToList( formValues );
for ( final String subject : subjects )
{
intruderService.check( IntruderRecordType.ATTRIBUTE, subject );
}
}
private static List<String> attributeFormToList( final Map<FormConfiguration, String> formValues )
{
final List<String> returnList = new ArrayList<>();
if ( formValues != null )
{
for ( final Map.Entry<FormConfiguration, String> entry : formValues.entrySet() )
{
final FormConfiguration formConfiguration = entry.getKey();
final String value = entry.getValue();
if ( value != null && value.length() > 0 )
{
returnList.add( formConfiguration.getName() + ":" + value );
}
}
}
return Collections.unmodifiableList( returnList );
}
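    // Example (illustrative): a form entry mapping a FormConfiguration named "mail" to the value
    // "user@example.com" yields the intruder subject "mail:user@example.com"; entries with null or
    // empty values are skipped.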
}
| 2,799 |
341 | <filename>src/main/java/com/ccnode/codegenerator/function/EqualCondition.java
package com.ccnode.codegenerator.function;
/**
* What always stop you is what you always believe.
* <p>
* Created by zhengjun.du on 2016/05/29 15:54
*/
public interface EqualCondition<T> {
public boolean isEqual( T t, T t2 );
}
| 108 |
16,989 | // Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.shell;
import com.google.devtools.build.lib.vfs.Path;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.time.Duration;
import java.util.Optional;
/** Provides execution statistics (e.g. resource usage) for external commands. */
public final class ExecutionStatistics {
/**
* Provides execution statistics based on a {@code execution_statistics.proto} file.
*
* @param executionStatisticsProtoPath path to a materialized ExecutionStatistics proto
* @return a {@link ResourceUsage} object containing execution statistics, if available
*/
public static Optional<ResourceUsage> getResourceUsage(Path executionStatisticsProtoPath)
throws IOException {
try (InputStream protoInputStream =
new BufferedInputStream(executionStatisticsProtoPath.getInputStream())) {
Protos.ExecutionStatistics executionStatisticsProto =
Protos.ExecutionStatistics.parseFrom(protoInputStream);
if (executionStatisticsProto.hasResourceUsage()) {
return Optional.of(new ResourceUsage(executionStatisticsProto.getResourceUsage()));
} else {
return Optional.empty();
}
}
}
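  // A minimal usage sketch (illustrative only): read the statistics proto written for an external
  // command and, if resource usage is present, inspect a few fields. The statisticsPath variable is
  // a hypothetical com.google.devtools.build.lib.vfs.Path pointing at a materialized proto file.
  //
  //   Optional<ExecutionStatistics.ResourceUsage> usage =
  //       ExecutionStatistics.getResourceUsage(statisticsPath);
  //   usage.ifPresent(u -> System.out.printf("user=%s maxRss=%d%n",
  //       u.getUserExecutionTime(), u.getMaximumResidentSetSize()));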
/**
* Provides resource usage statistics for command execution, derived from the getrusage() system
* call.
*/
public static class ResourceUsage {
private final Protos.ResourceUsage resourceUsageProto;
/** Provides resource usage statistics via a ResourceUsage proto object. */
public ResourceUsage(Protos.ResourceUsage resourceUsageProto) {
this.resourceUsageProto = resourceUsageProto;
}
/** Returns the user time for command execution, if available. */
public Duration getUserExecutionTime() {
return Duration.ofSeconds(
resourceUsageProto.getUtimeSec(), resourceUsageProto.getUtimeUsec() * 1000);
}
/** Returns the system time for command execution, if available. */
public Duration getSystemExecutionTime() {
return Duration.ofSeconds(
resourceUsageProto.getStimeSec(), resourceUsageProto.getStimeUsec() * 1000);
}
/** Returns the maximum resident set size (in bytes) during command execution, if available. */
public long getMaximumResidentSetSize() {
return resourceUsageProto.getMaxrss();
}
/**
* Returns the integral shared memory size (in bytes) during command execution, if available.
*/
public long getIntegralSharedMemorySize() {
return resourceUsageProto.getIxrss();
}
/**
* Returns the integral unshared data size (in bytes) during command execution, if available.
*/
public long getIntegralUnsharedDataSize() {
return resourceUsageProto.getIdrss();
}
/**
* Returns the integral unshared stack size (in bytes) during command execution, if available.
*/
public long getIntegralUnsharedStackSize() {
return resourceUsageProto.getIsrss();
}
/**
* Returns the number of page reclaims (soft page faults) during command execution, if
* available.
*/
public long getPageReclaims() {
return resourceUsageProto.getMinflt();
}
/** Returns the number of (hard) page faults during command execution, if available. */
public long getPageFaults() {
return resourceUsageProto.getMajflt();
}
/** Returns the number of swaps during command execution, if available. */
public long getSwaps() {
return resourceUsageProto.getNswap();
}
/** Returns the number of block input operations during command execution, if available. */
public long getBlockInputOperations() {
return resourceUsageProto.getInblock();
}
/** Returns the number of block output operations during command execution, if available. */
public long getBlockOutputOperations() {
return resourceUsageProto.getOublock();
}
/** Returns the number of IPC messages sent during command execution, if available. */
public long getIpcMessagesSent() {
return resourceUsageProto.getMsgsnd();
}
/** Returns the number of IPC messages received during command execution, if available. */
public long getIpcMessagesReceived() {
return resourceUsageProto.getMsgrcv();
}
/** Returns the number of signals received during command execution, if available. */
public long getSignalsReceived() {
return resourceUsageProto.getNsignals();
}
/** Returns the number of voluntary context switches during command execution, if available. */
public long getVoluntaryContextSwitches() {
return resourceUsageProto.getNvcsw();
}
/**
* Returns the number of involuntary context switches during command execution, if available.
*/
public long getInvoluntaryContextSwitches() {
return resourceUsageProto.getNivcsw();
}
}
}
| 1,673 |
2,333 | //
// MPLoggerFormatter.h
// MobileProject
//
// Created by wujunyang on 16/6/20.
// Copyright © 2016年 wujunyang. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface MPLoggerFormatter : NSObject<DDLogFormatter>
@end
| 89 |
2,996 | <reponame>Elyahu41/Terasology
// Copyright 2021 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.engine.rendering.dag.dependencyConnections;
import org.terasology.engine.core.SimpleUri;
import org.terasology.engine.rendering.opengl.FBO;
public class FboConnection extends DependencyConnection<FBO> {
/**
*
* @param name
* @param type
* @param parentNode
*/
public FboConnection(String name, Type type, SimpleUri parentNode) {
super(name, type, parentNode);
}
/**
*
* @param name
* @param type
* @param data
* @param parentNode
*/
public FboConnection(String name, Type type, FBO data, SimpleUri parentNode) {
super(name, type, parentNode);
super.setData(data);
}
public String toString() {
return super.toString();
}
public static String getConnectionName(int number, SimpleUri nodeUri) {
return new StringBuilder(nodeUri.toString()).append(":FBO").append(number).toString();
}
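    // Example (illustrative): for a node whose SimpleUri prints as "engine:myNode",
    // getConnectionName(1, nodeUri) yields "engine:myNode:FBO1", i.e. the node URI
    // followed by ":FBO" and the connection number.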
}
| 412 |
890 | <reponame>light1021/asylo
/*
*
* Copyright 2018 Asylo authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#ifndef ASYLO_TEST_UTIL_PROTO_MATCHERS_H_
#define ASYLO_TEST_UTIL_PROTO_MATCHERS_H_
#include <google/protobuf/util/message_differencer.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "absl/strings/string_view.h"
namespace asylo {
namespace internal {
template <typename MessageT>
class ProtoMatcher {
public:
ProtoMatcher(const MessageT &message,
std::function<bool(const MessageT &, const MessageT &)>
message_comparator)
: message_(message), message_comparator_(std::move(message_comparator)) {}
void DescribeTo(std::ostream *os) const { Describe(os, "matches"); }
void DescribeNegationTo(std::ostream *os) const {
Describe(os, "does not match");
}
bool MatchAndExplain(const MessageT &message,
::testing::MatchResultListener *listener) const {
if (!message_comparator_(message, message_)) {
*listener << "which doesn't match";
return false;
}
return true;
}
private:
void Describe(std::ostream *os, absl::string_view explanation) const {
*os << explanation << " " << message_.GetDescriptor()->full_name() << " ";
::testing::internal::UniversalPrint(message_, os);
}
const MessageT &message_;
std::function<bool(const MessageT &, const MessageT &)> message_comparator_;
};
} // namespace internal
// A proto message matches Equally to another if all fields have been
// set to the same value.
template <typename MessageT>
::testing::PolymorphicMatcher<internal::ProtoMatcher<MessageT>> EqualsProto(
const MessageT &message) {
std::function<bool(const MessageT &, const MessageT &)> comparator =
::google::protobuf::util::MessageDifferencer::Equals;
return ::testing::MakePolymorphicMatcher(
internal::ProtoMatcher<MessageT>(message, std::move(comparator)));
}
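// A minimal usage sketch (illustrative, not part of this header), assuming a gtest/gmock test body
// and some protobuf message type FooProto with a string field `name` defined elsewhere; `actual` is
// a hypothetical message under test:
//
//   FooProto expected;
//   expected.set_name("bar");
//   EXPECT_THAT(actual, ::asylo::EqualsProto(expected));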
// A proto message matches Equivalently to another if all fields have
// the same value. This is different than Equals, in that fields with
// default values are compared. Two protos with uninitialized
// (default) values can never be Equal, but could be Equivalent.
template <typename MessageT>
::testing::PolymorphicMatcher<internal::ProtoMatcher<MessageT>> EquivalentProto(
const MessageT &message) {
std::function<bool(const MessageT &, const MessageT &)> comparator =
::google::protobuf::util::MessageDifferencer::Equivalent;
return ::testing::MakePolymorphicMatcher(
internal::ProtoMatcher<MessageT>(message, std::move(comparator)));
}
// A proto message matches Approximately Equally to another if all
// fields have been set equally, except for float fields which are
// instead compared with MathUtil::AlmostEquals().
template <typename MessageT>
::testing::PolymorphicMatcher<internal::ProtoMatcher<MessageT>>
ApproximatelyEqualsProto(const MessageT &message) {
std::function<bool(const MessageT &, const MessageT &)> comparator =
::google::protobuf::util::MessageDifferencer::ApproximatelyEquals;
return ::testing::MakePolymorphicMatcher(
internal::ProtoMatcher<MessageT>(message, std::move(comparator)));
}
// A proto message matches Approximately Equivalent to another if all
// fields are equivalent (see EquivalentProto above), except for float
// fields which are instead compared with MathUtil::AlmostEquals().
template <typename MessageT>
::testing::PolymorphicMatcher<internal::ProtoMatcher<MessageT>>
ApproximatelyEquivalentProto(const MessageT &message) {
std::function<bool(const MessageT &, const MessageT &)> comparator =
::google::protobuf::util::MessageDifferencer::ApproximatelyEquivalent;
return ::testing::MakePolymorphicMatcher(
internal::ProtoMatcher<MessageT>(message, std::move(comparator)));
}
// A proto message matches Partially(reference_message) if every field that is
// set in reference_message is set to the same value in the matchee message.
template <typename MessageT>
::testing::PolymorphicMatcher<internal::ProtoMatcher<MessageT>> Partially(
const MessageT &message) {
std::function<bool(const MessageT &, const MessageT &)> comparator =
[](const ::google::protobuf::Message &arg, const ::google::protobuf::Message &other) {
::google::protobuf::util::MessageDifferencer differ;
differ.set_scope(::google::protobuf::util::MessageDifferencer::PARTIAL);
return differ.Compare(other, arg);
};
return ::testing::MakePolymorphicMatcher(
internal::ProtoMatcher<MessageT>(message, std::move(comparator)));
}
} // namespace asylo
#endif // ASYLO_TEST_UTIL_PROTO_MATCHERS_H_
| 1,689 |
3,095 | <reponame>liyuzhao/QWidgetDemo
#ifndef _PLOT_H_
#define _PLOT_H_
#include <qwt_plot.h>
#include <qwt_scale_div.h>
#include <qwt_series_data.h>
class QwtPlotCurve;
class QwtPlotIntervalCurve;
class Plot: public QwtPlot
{
Q_OBJECT
public:
enum Mode
{
Bars,
Tube
};
Plot( QWidget * = NULL );
public Q_SLOTS:
void setMode( int );
void exportPlot();
private:
void insertCurve( const QString &title,
const QVector<QPointF> &, const QColor & );
void insertErrorBars( const QString &title,
const QVector<QwtIntervalSample> &,
const QColor &color );
QwtScaleDiv yearScaleDiv() const;
QwtPlotIntervalCurve *d_intervalCurve;
QwtPlotCurve *d_curve;
};
#endif
| 335 |
5,169 | <filename>Specs/d/9/6/FideasLib/0.2.0/FideasLib.podspec.json
{
"name": "FideasLib",
"version": "0.2.0",
"summary": "Fideas Mobile Application Development SDK",
"description": "Bu SDK ile mobil uygulamanin Fideas uzerinden",
"homepage": "https://github.com/salyangoz/fideas-sdk",
"license": {
"type": "MIT",
"file": "/Users/serhatyalcin/Documents/IOS Development/Salyangoz/FideasLib/LICENSE.md"
},
"authors": {
"Salyangoz": "<EMAIL>"
},
"source": {
"git": "https://github.com/salyangoz/fideas-sdk.git",
"branch": "master",
"tag": "0.2.0"
},
"dependencies": {
"Alamofire": [
],
"AlamofireSwiftyJSON": [
],
"SWXMLHash": [
"~> 4.0.0"
]
},
"platforms": {
"ios": "9.0"
},
"source_files": "FideasLib/"
}
| 376 |
792 | <filename>chapters/lyon.json<gh_stars>100-1000
{
"name": "<NAME>",
"location": "Lyon",
"country": "FR",
"region": "Europe",
"organizers": ["maxlath"],
"website": "http://nodeschool.io/lyon",
"repo": "http://github.com/nodeschool/lyon"
} | 105 |
838 | #include "../lib/runner.h"
RUNNER("unit");
| 19 |
427 | ##===-- debuggerdriver.py ------------------------------------*- Python -*-===##
##
# The LLVM Compiler Infrastructure
##
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
##
##===----------------------------------------------------------------------===##
import lldb
import lldbutil
import sys
from threading import Thread
class DebuggerDriver(Thread):
""" Drives the debugger and responds to events. """
def __init__(self, debugger, event_queue):
Thread.__init__(self)
self.event_queue = event_queue
# This is probably not great because it does not give liblldb a chance
# to clean up
self.daemon = True
self.initialize(debugger)
def initialize(self, debugger):
self.done = False
self.debugger = debugger
self.listener = debugger.GetListener()
if not self.listener.IsValid():
raise "Invalid listener"
self.listener.StartListeningForEventClass(self.debugger,
lldb.SBTarget.GetBroadcasterClassName(),
lldb.SBTarget.eBroadcastBitBreakpointChanged
#| lldb.SBTarget.eBroadcastBitModuleLoaded
#| lldb.SBTarget.eBroadcastBitModuleUnloaded
| lldb.SBTarget.eBroadcastBitWatchpointChanged
#| lldb.SBTarget.eBroadcastBitSymbolLoaded
)
self.listener.StartListeningForEventClass(self.debugger,
lldb.SBThread.GetBroadcasterClassName(),
lldb.SBThread.eBroadcastBitStackChanged
# lldb.SBThread.eBroadcastBitBreakpointChanged
| lldb.SBThread.eBroadcastBitThreadSuspended
| lldb.SBThread.eBroadcastBitThreadResumed
| lldb.SBThread.eBroadcastBitSelectedFrameChanged
| lldb.SBThread.eBroadcastBitThreadSelected
)
self.listener.StartListeningForEventClass(self.debugger,
lldb.SBProcess.GetBroadcasterClassName(),
lldb.SBProcess.eBroadcastBitStateChanged
| lldb.SBProcess.eBroadcastBitInterrupt
| lldb.SBProcess.eBroadcastBitSTDOUT
| lldb.SBProcess.eBroadcastBitSTDERR
| lldb.SBProcess.eBroadcastBitProfileData
)
self.listener.StartListeningForEventClass(self.debugger,
lldb.SBCommandInterpreter.GetBroadcasterClass(),
lldb.SBCommandInterpreter.eBroadcastBitThreadShouldExit
| lldb.SBCommandInterpreter.eBroadcastBitResetPrompt
| lldb.SBCommandInterpreter.eBroadcastBitQuitCommandReceived
| lldb.SBCommandInterpreter.eBroadcastBitAsynchronousOutputData
| lldb.SBCommandInterpreter.eBroadcastBitAsynchronousErrorData
)
def createTarget(self, target_image, args=None):
self.handleCommand("target create %s" % target_image)
if args is not None:
self.handleCommand("settings set target.run-args %s" % args)
def attachProcess(self, pid):
self.handleCommand("process attach -p %d" % pid)
pass
def loadCore(self, corefile):
self.handleCommand("target create -c %s" % corefile)
pass
def setDone(self):
self.done = True
def isDone(self):
return self.done
def getPrompt(self):
return self.debugger.GetPrompt()
def getCommandInterpreter(self):
return self.debugger.GetCommandInterpreter()
def getSourceManager(self):
return self.debugger.GetSourceManager()
def setSize(self, width, height):
# FIXME: respect height
self.debugger.SetTerminalWidth(width)
def getTarget(self):
return self.debugger.GetTargetAtIndex(0)
def handleCommand(self, cmd):
ret = lldb.SBCommandReturnObject()
self.getCommandInterpreter().HandleCommand(cmd, ret)
return ret
def eventLoop(self):
while not self.isDone():
event = lldb.SBEvent()
got_event = self.listener.WaitForEvent(lldb.UINT32_MAX, event)
if got_event and not event.IsValid():
self.winAddStr("Warning: Invalid or no event...")
continue
elif not event.GetBroadcaster().IsValid():
continue
self.event_queue.put(event)
def run(self):
self.eventLoop()
def terminate(self):
lldb.SBDebugger.Terminate()
sys.exit(0)
def createDriver(debugger, event_queue):
driver = DebuggerDriver(debugger, event_queue)
# driver.start()
# if pid specified:
# - attach to pid
# else if core file specified
# - create target from corefile
# else
# - create target from file
# - settings append target.run-args <args-from-cmdline>
# source .lldbinit file
return driver
| 3,109 |
3,212 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.jms.processors.helpers;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import javax.jms.MessageProducer;
import javax.jms.Session;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* {@link Session} invocation handler for Session proxy instances.
* @see ConnectionFactoryInvocationHandler
*/
final class SessionInvocationHandler implements InvocationHandler {
private static final Logger LOGGER = LoggerFactory.getLogger(SessionInvocationHandler.class);
private final AtomicInteger closeCalled = new AtomicInteger();
private final List<MessageProducerInvocationHandler> handlers = new CopyOnWriteArrayList<>();
private final AtomicInteger openedProducers = new AtomicInteger();
private final Session session;
public SessionInvocationHandler(Session session) {
this.session = Objects.requireNonNull(session);
}
public Session getSession() {
return session;
}
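    // A minimal sketch (illustrative only) of how a Session proxy backed by this handler could be
    // created, mirroring the Proxy.newProxyInstance call used for MessageProducer in invoke() below;
    // `session` is a hypothetical javax.jms.Session instance:
    //
    //   SessionInvocationHandler handler = new SessionInvocationHandler(session);
    //   Session sessionProxy = (Session) Proxy.newProxyInstance(
    //       Session.class.getClassLoader(), new Class[] { Session.class }, handler);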
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
final Object o = session.getClass().getMethod(method.getName(), method.getParameterTypes()).invoke(session, args);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Method {} called on {}", method.getName(), session);
}
if (method.getName().equals("createProducer")) {
MessageProducer messageProducer = (MessageProducer) o;
LOGGER.info("Created a Message Producer {} using session {}", messageProducer, session);
openedProducers.incrementAndGet();
MessageProducerInvocationHandler mp = new MessageProducerInvocationHandler(messageProducer);
handlers.add(mp);
MessageProducer messageProducerProxy = (MessageProducer) Proxy.newProxyInstance(o.getClass().getClassLoader(), new Class[] { MessageProducer.class }, mp);
return messageProducerProxy;
}
if ("close".equals(method.getName())) {
closeCalled.incrementAndGet();
LOGGER.info("Session close method called {} times for {}", closeCalled, session);
}
return o;
}
public boolean isClosed() {
boolean closed = closeCalled.get() >= 1;
for (MessageProducerInvocationHandler handler : handlers) {
boolean handlerClosed = handler.isClosed();
closed = closed && handlerClosed;
if (!handlerClosed) {
LOGGER.warn("MessageProducer is not closed {}", handler.getMessageProducer());
}
}
return closed;
}
public int openedProducers() {
return openedProducers.get();
}
}
| 1,216 |
750 | from datetime import datetime
import re
import json
from twisted.web.resource import Resource
from jasmin.protocols.http.validation import UrlArgsValidator, HttpAPICredentialValidator
from jasmin.protocols.http.errors import HttpApiError
from jasmin.protocols.http.endpoints import authenticate_user
class Balance(Resource):
    isLeaf = True
def __init__(self, RouterPB, stats, log):
Resource.__init__(self)
self.RouterPB = RouterPB
self.stats = stats
self.log = log
def render_GET(self, request):
"""
/balance request processing
        Note: Balance is used by a user to check their balance
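
        Illustrative request/response (values and credentials are hypothetical):
            GET /balance?username=foo&password=bar
            -> {"balance": 44.0, "sms_count": 10}
        The returned values come from the user's mt_credential quotas; 'ND' is
        returned for any quota that is not defined.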
"""
self.log.debug("Rendering /balance response with args: %s from %s",
request.args, request.getClientIP())
request.responseHeaders.addRawHeader(b"content-type", b"application/json")
response = {'return': None, 'status': 200}
self.stats.inc('request_count')
self.stats.set('last_request_at', datetime.now())
try:
# Validation
fields = {b'username': {'optional': False, 'pattern': re.compile(rb'^.{1,16}$')},
b'password': {'optional': False, 'pattern': re.compile(rb'^.{1,16}$')}}
# Make validation
v = UrlArgsValidator(request, fields)
v.validate()
# Authentication
user = authenticate_user(
request.args[b'username'][0],
request.args[b'password'][0],
self.RouterPB,
self.stats,
self.log
)
# Update CnxStatus
user.getCnxStatus().httpapi['connects_count'] += 1
user.getCnxStatus().httpapi['balance_request_count'] += 1
user.getCnxStatus().httpapi['last_activity_at'] = datetime.now()
# Make Credential validation
v = HttpAPICredentialValidator('Balance', user, request)
v.validate()
balance = user.mt_credential.getQuota('balance')
if balance is None:
balance = 'ND'
sms_count = user.mt_credential.getQuota('submit_sm_count')
if sms_count is None:
sms_count = 'ND'
response = {'return': {'balance': balance, 'sms_count': sms_count}, 'status': 200}
except HttpApiError as e:
self.log.error("Error: %s", e)
response = {'return': e.message, 'status': e.code}
except Exception as e:
self.log.error("Error: %s", e)
response = {'return': "Unknown error: %s" % e, 'status': 500}
finally:
self.log.debug("Returning %s to %s.", response, request.getClientIP())
# Return message
if response['return'] is None:
response['return'] = 'System error'
request.setResponseCode(500)
else:
request.setResponseCode(response['status'])
if isinstance(response['return'], bytes):
return json.dumps(response['return'].decode()).encode()
return json.dumps(response['return']).encode()
| 1,496 |
3,508 | package com.fishercoder.solutions;
public class _1524 {
public static class Solution1 {
/**
         * This brute force solution results in a Time Limit Exceeded error on LeetCode.
*/
public int numOfSubarrays(int[] arr) {
long oddCount = 0;
for (int i = 0; i < arr.length; i++) {
long subTotal = 0;
for (int j = i; j < arr.length; j++) {
subTotal += arr[j];
if (subTotal % 2 != 0) {
oddCount++;
}
}
}
            return (int) (oddCount % 1000000007);
}
}
public static class Solution2 {
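        /**
         * Prefix-sum parity sketch (editorial note, not from the original source):
         * a subarray arr[i..j] has an odd sum exactly when the prefix sums ending
         * at i-1 and at j have different parities. While scanning we therefore keep
         * counts of the even/odd prefix sums seen so far (the empty prefix counts
         * as even) and, at each position, add the count of the opposite parity.
         *
         * Worked example for arr = {1, 2, 3}: prefix sums 1, 3, 6 have parities
         * odd, odd, even, contributing 1 + 1 + 2 = 4 odd-sum subarrays
         * ({1}, {1,2}, {2,3}, {3}).
         */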
public int numOfSubarrays(int[] arr) {
int oddSumCount = 0;
int evenSumCount = 1;
long result = 0;
int sum = 0;
for (int num : arr) {
sum += num;
if (sum % 2 == 0) {
result += oddSumCount;
} else {
result += evenSumCount;
}
if (sum % 2 == 0) {
evenSumCount++;
} else {
oddSumCount++;
}
result %= 1000000007;
}
            return (int) (result % 1000000007);
}
}
}
| 807 |
348 | <filename>docs/data/leg-t2/026/02603136.json
{"nom":"Val-Maravel","circ":"3ème circonscription","dpt":"Drôme","inscrits":53,"abs":12,"votants":41,"blancs":0,"nuls":11,"exp":30,"res":[{"nuance":"REM","nom":"<NAME>","voix":21},{"nuance":"LR","nom":"<NAME>","voix":9}]} | 111 |
777 | <gh_stars>100-1000
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stddef.h>
#include <utility>
#include <vector>
#include "base/bind.h"
#include "base/macros.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "build/build_config.h"
#include "media/base/gmock_callback_support.h"
#include "media/base/media_util.h"
#include "media/base/mock_filters.h"
#include "media/base/test_helpers.h"
#include "media/filters/decoder_selector.h"
#include "media/filters/decrypting_demuxer_stream.h"
#include "testing/gtest/include/gtest/gtest.h"
using ::testing::_;
using ::testing::InvokeWithoutArgs;
using ::testing::IsNull;
using ::testing::NiceMock;
using ::testing::NotNull;
using ::testing::Return;
using ::testing::StrictMock;
// Use anonymous namespace here to prevent the actions to be defined multiple
// times across multiple test files. Sadly we can't use static for them.
namespace {
MATCHER(EncryptedConfig, "") {
return arg.is_encrypted();
}
MATCHER(ClearConfig, "") {
return !arg.is_encrypted();
}
} // namespace
namespace media {
class AudioDecoderSelectorTest : public ::testing::Test {
public:
enum DecryptorCapability {
kNoCdm, // No CDM. Only possible for clear stream.
kNoDecryptor, // CDM is available but Decryptor is not supported.
kDecryptOnly,
kDecryptAndDecode
};
AudioDecoderSelectorTest()
: media_log_(new MediaLog()),
traits_(media_log_),
demuxer_stream_(
new StrictMock<MockDemuxerStream>(DemuxerStream::AUDIO)),
decoder_1_(new StrictMock<MockAudioDecoder>()),
decoder_2_(new StrictMock<MockAudioDecoder>()) {
all_decoders_.push_back(decoder_1_);
all_decoders_.push_back(decoder_2_);
// |cdm_context_| and |decryptor_| are conditionally created in
// InitializeDecoderSelector().
}
~AudioDecoderSelectorTest() { base::RunLoop().RunUntilIdle(); }
MOCK_METHOD2(OnDecoderSelected,
void(AudioDecoder*, DecryptingDemuxerStream*));
void MockOnDecoderSelected(std::unique_ptr<AudioDecoder> decoder,
std::unique_ptr<DecryptingDemuxerStream> stream) {
OnDecoderSelected(decoder.get(), stream.get());
selected_decoder_ = std::move(decoder);
}
void UseClearStream() {
AudioDecoderConfig clear_audio_config(kCodecVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, 44100,
EmptyExtraData(), Unencrypted());
demuxer_stream_->set_audio_decoder_config(clear_audio_config);
}
void UseEncryptedStream() {
AudioDecoderConfig encrypted_audio_config(
kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
EmptyExtraData(), AesCtrEncryptionScheme());
demuxer_stream_->set_audio_decoder_config(encrypted_audio_config);
}
void InitializeDecoderSelector(DecryptorCapability decryptor_capability,
int num_decoders) {
if (decryptor_capability != kNoCdm) {
cdm_context_.reset(new StrictMock<MockCdmContext>());
if (decryptor_capability == kNoDecryptor) {
EXPECT_CALL(*cdm_context_, GetDecryptor())
.WillRepeatedly(Return(nullptr));
} else {
decryptor_.reset(new NiceMock<MockDecryptor>());
EXPECT_CALL(*cdm_context_, GetDecryptor())
.WillRepeatedly(Return(decryptor_.get()));
EXPECT_CALL(*decryptor_, InitializeAudioDecoder(_, _))
.WillRepeatedly(
RunCallback<1>(decryptor_capability == kDecryptAndDecode));
}
}
DCHECK_GE(all_decoders_.size(), static_cast<size_t>(num_decoders));
all_decoders_.erase(
all_decoders_.begin() + num_decoders, all_decoders_.end());
decoder_selector_.reset(new AudioDecoderSelector(
message_loop_.task_runner(), std::move(all_decoders_), media_log_));
}
void SelectDecoder() {
decoder_selector_->SelectDecoder(
&traits_, demuxer_stream_.get(), cdm_context_.get(),
base::Bind(&AudioDecoderSelectorTest::MockOnDecoderSelected,
base::Unretained(this)),
base::Bind(&AudioDecoderSelectorTest::OnDecoderOutput),
base::Bind(&AudioDecoderSelectorTest::OnWaitingForDecryptionKey));
base::RunLoop().RunUntilIdle();
}
void SelectDecoderAndDestroy() {
SelectDecoder();
EXPECT_CALL(*this, OnDecoderSelected(IsNull(), IsNull()));
decoder_selector_.reset();
base::RunLoop().RunUntilIdle();
}
static void OnDecoderOutput(const scoped_refptr<AudioBuffer>& output) {
NOTREACHED();
}
static void OnWaitingForDecryptionKey() {
NOTREACHED();
}
scoped_refptr<MediaLog> media_log_;
// Stream traits specific to audio decoding.
DecoderStreamTraits<DemuxerStream::AUDIO> traits_;
// Declare |decoder_selector_| after |demuxer_stream_| and |decryptor_| since
// |demuxer_stream_| and |decryptor_| should outlive |decoder_selector_|.
std::unique_ptr<StrictMock<MockDemuxerStream>> demuxer_stream_;
std::unique_ptr<StrictMock<MockCdmContext>> cdm_context_;
  // Use NiceMock since we don't care about most of the calls on the decryptor, e.g.
// RegisterNewKeyCB().
std::unique_ptr<NiceMock<MockDecryptor>> decryptor_;
std::unique_ptr<AudioDecoderSelector> decoder_selector_;
StrictMock<MockAudioDecoder>* decoder_1_;
StrictMock<MockAudioDecoder>* decoder_2_;
ScopedVector<AudioDecoder> all_decoders_;
std::unique_ptr<AudioDecoder> selected_decoder_;
base::MessageLoop message_loop_;
private:
DISALLOW_COPY_AND_ASSIGN(AudioDecoderSelectorTest);
};
// TODO(xhwang): Add kNoCdm tests for clear stream.
// The stream is not encrypted but we have no clear decoder. No decoder can be
// selected.
TEST_F(AudioDecoderSelectorTest, ClearStream_NoDecryptor_NoClearDecoder) {
UseClearStream();
InitializeDecoderSelector(kNoDecryptor, 0);
EXPECT_CALL(*this, OnDecoderSelected(IsNull(), IsNull()));
SelectDecoder();
}
// The stream is not encrypted and we have one clear decoder. The decoder
// will be selected.
TEST_F(AudioDecoderSelectorTest, ClearStream_NoDecryptor_OneClearDecoder) {
UseClearStream();
InitializeDecoderSelector(kNoDecryptor, 1);
EXPECT_CALL(*decoder_1_, Initialize(ClearConfig(), _, _, _))
.WillOnce(RunCallback<2>(true));
EXPECT_CALL(*this, OnDecoderSelected(decoder_1_, IsNull()));
SelectDecoder();
}
TEST_F(AudioDecoderSelectorTest,
Destroy_ClearStream_NoDecryptor_OneClearDecoder) {
UseClearStream();
InitializeDecoderSelector(kNoDecryptor, 1);
EXPECT_CALL(*decoder_1_, Initialize(ClearConfig(), _, _, _));
SelectDecoderAndDestroy();
}
// The stream is not encrypted and we have multiple clear decoders. The first
// decoder that can decode the input stream will be selected.
TEST_F(AudioDecoderSelectorTest, ClearStream_NoDecryptor_MultipleClearDecoder) {
UseClearStream();
InitializeDecoderSelector(kNoDecryptor, 2);
EXPECT_CALL(*decoder_1_, Initialize(ClearConfig(), _, _, _))
.WillOnce(RunCallback<2>(false));
EXPECT_CALL(*decoder_2_, Initialize(ClearConfig(), _, _, _))
.WillOnce(RunCallback<2>(true));
EXPECT_CALL(*this, OnDecoderSelected(decoder_2_, IsNull()));
SelectDecoder();
}
TEST_F(AudioDecoderSelectorTest,
Destroy_ClearStream_NoDecryptor_MultipleClearDecoder) {
UseClearStream();
InitializeDecoderSelector(kNoDecryptor, 2);
EXPECT_CALL(*decoder_1_, Initialize(ClearConfig(), _, _, _))
.WillOnce(RunCallback<2>(false));
EXPECT_CALL(*decoder_2_, Initialize(ClearConfig(), _, _, _));
SelectDecoderAndDestroy();
}
// There is a decryptor but the stream is not encrypted. The decoder will be
// selected.
TEST_F(AudioDecoderSelectorTest, ClearStream_HasDecryptor) {
UseClearStream();
InitializeDecoderSelector(kDecryptOnly, 1);
EXPECT_CALL(*decoder_1_, Initialize(ClearConfig(), _, _, _))
.WillOnce(RunCallback<2>(true));
EXPECT_CALL(*this, OnDecoderSelected(decoder_1_, IsNull()));
SelectDecoder();
}
TEST_F(AudioDecoderSelectorTest, Destroy_ClearStream_HasDecryptor) {
UseClearStream();
InitializeDecoderSelector(kDecryptOnly, 1);
EXPECT_CALL(*decoder_1_, Initialize(ClearConfig(), _, _, _));
SelectDecoderAndDestroy();
}
// The stream is encrypted and there's no decryptor. The decoder only supports
// clear streams so no decoder can be selected.
TEST_F(AudioDecoderSelectorTest, EncryptedStream_NoDecryptor_OneClearDecoder) {
UseEncryptedStream();
InitializeDecoderSelector(kNoDecryptor, 1);
EXPECT_CALL(*decoder_1_, Initialize(EncryptedConfig(), _, _, _))
.WillOnce(RunCallback<2>(false));
EXPECT_CALL(*this, OnDecoderSelected(IsNull(), IsNull()));
SelectDecoder();
}
TEST_F(AudioDecoderSelectorTest,
Destroy_EncryptedStream_NoDecryptor_OneClearDecoder) {
UseEncryptedStream();
InitializeDecoderSelector(kNoDecryptor, 1);
EXPECT_CALL(*decoder_1_, Initialize(EncryptedConfig(), _, _, _));
SelectDecoderAndDestroy();
}
// The stream is encrypted and there's no decryptor. There are multiple decoders
// and the first one that supports encrypted streams is selected.
TEST_F(AudioDecoderSelectorTest, EncryptedStream_NoDecryptor_MultipleDecoders) {
UseEncryptedStream();
InitializeDecoderSelector(kNoDecryptor, 2);
EXPECT_CALL(*decoder_1_, Initialize(EncryptedConfig(), _, _, _))
.WillOnce(RunCallback<2>(false));
EXPECT_CALL(*decoder_2_, Initialize(EncryptedConfig(), _, _, _))
.WillOnce(RunCallback<2>(true));
EXPECT_CALL(*this, OnDecoderSelected(decoder_2_, IsNull()));
SelectDecoder();
}
TEST_F(AudioDecoderSelectorTest,
Destroy_EncryptedStream_NoDecryptor_MultipleDecoders) {
UseEncryptedStream();
InitializeDecoderSelector(kNoDecryptor, 2);
EXPECT_CALL(*decoder_1_, Initialize(EncryptedConfig(), _, _, _))
.WillOnce(RunCallback<2>(false));
EXPECT_CALL(*decoder_2_, Initialize(EncryptedConfig(), _, _, _));
SelectDecoderAndDestroy();
}
// Decryptor can only do decryption and there's no decoder available. No decoder
// can be selected.
TEST_F(AudioDecoderSelectorTest, EncryptedStream_DecryptOnly_NoClearDecoder) {
UseEncryptedStream();
InitializeDecoderSelector(kDecryptOnly, 0);
EXPECT_CALL(*this, OnDecoderSelected(IsNull(), IsNull()));
SelectDecoder();
}
// Decryptor can do decryption-only and there's a decoder available. The decoder
// will be selected and a DecryptingDemuxerStream will be created.
TEST_F(AudioDecoderSelectorTest, EncryptedStream_DecryptOnly_OneClearDecoder) {
UseEncryptedStream();
InitializeDecoderSelector(kDecryptOnly, 1);
EXPECT_CALL(*decoder_1_, Initialize(ClearConfig(), _, _, _))
.WillOnce(RunCallback<2>(true));
EXPECT_CALL(*this, OnDecoderSelected(decoder_1_, NotNull()));
SelectDecoder();
}
TEST_F(AudioDecoderSelectorTest,
Destroy_EncryptedStream_DecryptOnly_OneClearDecoder) {
UseEncryptedStream();
InitializeDecoderSelector(kDecryptOnly, 1);
EXPECT_CALL(*decoder_1_, Initialize(ClearConfig(), _, _, _));
SelectDecoderAndDestroy();
}
// Decryptor can only do decryption and there are multiple decoders available.
// The first decoder that can decode the input stream will be selected and
// a DecryptingDemuxerStream will be created.
TEST_F(AudioDecoderSelectorTest,
EncryptedStream_DecryptOnly_MultipleClearDecoder) {
UseEncryptedStream();
InitializeDecoderSelector(kDecryptOnly, 2);
EXPECT_CALL(*decoder_1_, Initialize(ClearConfig(), _, _, _))
.WillOnce(RunCallback<2>(false));
EXPECT_CALL(*decoder_2_, Initialize(ClearConfig(), _, _, _))
.WillOnce(RunCallback<2>(true));
EXPECT_CALL(*this, OnDecoderSelected(decoder_2_, NotNull()));
SelectDecoder();
}
TEST_F(AudioDecoderSelectorTest,
Destroy_EncryptedStream_DecryptOnly_MultipleClearDecoder) {
UseEncryptedStream();
InitializeDecoderSelector(kDecryptOnly, 2);
EXPECT_CALL(*decoder_1_, Initialize(ClearConfig(), _, _, _))
.WillOnce(RunCallback<2>(false));
EXPECT_CALL(*decoder_2_, Initialize(ClearConfig(), _, _, _));
SelectDecoderAndDestroy();
}
// Decryptor can do decryption and decoding.
TEST_F(AudioDecoderSelectorTest, EncryptedStream_DecryptAndDecode) {
UseEncryptedStream();
InitializeDecoderSelector(kDecryptAndDecode, 1);
#if !defined(OS_ANDROID)
// A DecryptingVideoDecoder will be created and selected. The clear decoder
  // should not be touched at all. No DecryptingDemuxerStream should be
// created.
EXPECT_CALL(*this, OnDecoderSelected(NotNull(), IsNull()));
#else
// A DecryptingDemuxerStream will be created. The clear decoder will be
// initialized and returned.
EXPECT_CALL(*decoder_1_, Initialize(ClearConfig(), _, _, _))
.WillOnce(RunCallback<2>(true));
EXPECT_CALL(*this, OnDecoderSelected(NotNull(), NotNull()));
#endif
SelectDecoder();
}
} // namespace media
| 4,873 |
349 | <filename>src/types.h
#ifndef TYPES_H
#define TYPES_H
#include <stdint.h>
#include <stdbool.h>
#include <string.h>
static int VERBOSE = 0;
static bool SINGLE_STEP = false;
typedef uint32_t uint;
typedef struct {
uint data[4096];
uint privilege;
} csr_state;
typedef struct {
uint rbr_thr_ier_iir;
uint lcr_mcr_lsr_scr;
bool thre_ip;
bool interrupting;
} uart_state;
typedef struct {
bool msip;
uint mtimecmp_lo;
uint mtimecmp_hi;
uint mtime_lo;
uint mtime_hi;
} clint_state;
typedef struct {
uint mode;
uint ppn;
} mmu_state;
typedef struct {
uint clock;
uint xreg[32];
uint pc;
uint8_t *mem;
uint8_t *dtb;
uint8_t *mtd;
uint mtd_size;
csr_state csr;
clint_state clint;
uart_state uart;
mmu_state mmu;
bool reservation_en;
uint reservation_addr;
} cpu_t;
typedef struct {
bool en;
bool irq;
uint type;
uint value;
} trap;
typedef struct {
uint write_reg;
uint write_val;
uint pc_val;
uint csr_write;
uint csr_val;
trap trap;
} ins_ret;
ins_ret ins_ret_noop(cpu_t *cpu) {
ins_ret ret;
memset(&ret, 0, sizeof(ins_ret));
ret.pc_val = cpu->pc + 4;
return ret;
}
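/* Descriptive note: sign_extend() below treats the low b bits of x as a
 * two's-complement value and widens it to 32 bits, e.g.
 * sign_extend(0xFFF, 12) == 0xFFFFFFFF (-1) while sign_extend(0x7FF, 12) == 0x7FF. */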
uint sign_extend(uint x, uint b) {
uint m = ((uint)1) << (b - 1);
return (x ^ m) - m;
}
static cpu_t cpu;
#endif
| 667 |
619 | <filename>examples/python/kxtj3.py
#!/usr/bin/env python
# The MIT License (MIT)
#
# Author: <NAME>
# Copyright (c) 2018 Rohm Semiconductor.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import time, sys, signal, atexit
from upm import pyupm_kxtj3
def main():
kxtj3_sensor = pyupm_kxtj3.KXTJ3(0x0, 0x0f)
kxtj3_sensor.SensorInit(pyupm_kxtj3.KXTJ3_ODR_25,
pyupm_kxtj3.HIGH_RES,
pyupm_kxtj3.KXTJ3_RANGE_16G_14)
# Prevent stack printing on CTRL^C
def SIGINTHandler(signum, frame):
raise SystemExit
def exitHandler():
print("Exiting")
sys.exit(0)
atexit.register(exitHandler)
signal.signal(signal.SIGINT, SIGINTHandler)
sampleCounter = 10
waitTime = kxtj3_sensor.GetAccelerationSamplePeriod()
print("Setting settings:\nODR: 25 Hz\nResolution: "
"High\nAcceleration range: 16g with 14bits")
print("Acceleration:")
while sampleCounter > 0:
[x, y, z] = kxtj3_sensor.GetAccelerationVector()
print ("x: %0.02f, y: %0.02f, z: %0.02f" % (x, y, z))
time.sleep(waitTime)
sampleCounter -= 1
if __name__ == '__main__':
main()
| 818 |
15,056 | #!/usr/bin/env python
#
# A simple benchmark of tornado template rendering, based on
# https://github.com/mitsuhiko/jinja2/blob/master/examples/bench.py
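#
# Example invocation (the script filename here is assumed, not given in the source):
#   python template_bench.py --num=500   # time 500 render iterations
#   python template_bench.py --dump      # print the generated template code and exit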
import sys
from timeit import Timer
from tornado.options import options, define, parse_command_line
from tornado.template import Template
define('num', default=100, help='number of iterations')
define('dump', default=False, help='print template generated code and exit')
context = {
'page_title': 'mitsuhiko\'s benchmark',
'table': [dict(a=1, b=2, c=3, d=4, e=5,
f=6, g=7, h=8, i=9, j=10) for x in range(1000)]
}
tmpl = Template("""\
<!doctype html>
<html>
<head>
<title>{{ page_title }}</title>
</head>
<body>
<div class="header">
<h1>{{ page_title }}</h1>
</div>
<ul class="navigation">
{% for href, caption in [ \
('index.html', 'Index'), \
('downloads.html', 'Downloads'), \
('products.html', 'Products') \
] %}
<li><a href="{{ href }}">{{ caption }}</a></li>
{% end %}
</ul>
<div class="table">
<table>
{% for row in table %}
<tr>
{% for cell in row %}
<td>{{ cell }}</td>
{% end %}
</tr>
{% end %}
</table>
</div>
</body>
</html>\
""")
def render():
tmpl.generate(**context)
def main():
parse_command_line()
if options.dump:
print(tmpl.code)
sys.exit(0)
t = Timer(render)
results = t.timeit(options.num) / options.num
print('%0.3f ms per iteration' % (results * 1000))
if __name__ == '__main__':
main()
| 715 |
375 | /*
* This file is part of the Wayback archival access software
* (http://archive-access.sourceforge.net/projects/wayback/).
*
* Licensed to the Internet Archive (IA) by one or more individual
* contributors.
*
* The IA licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.archive.wayback.replay.selector;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import org.archive.wayback.core.CaptureSearchResult;
import org.archive.wayback.core.Resource;
import org.archive.wayback.core.WaybackRequest;
/**
* Class which allows matching based on:
*
 * a) one of several strings, any of which, if found in the path, causes a match
 * b) one of several strings, any of which, if found in the query, causes a match
 * c) one of several strings, *ALL* of which must be found in the url to cause a match
*
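 * Illustrative sketch (not from the original source) of how the lists drive
 * matching; the values are hypothetical:
 * <pre>
 *   PathMatchSelector selector = new PathMatchSelector();
 *   selector.setPathContains(Arrays.asList(".css", ".js"));
 *   // canHandle(...) now accepts captures whose URL path contains ".css" or
 *   // ".js", provided the resource also passes the size check.
 * </pre>
 *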
* @author brad
* @version $Date$, $Revision$
*/
public class PathMatchSelector extends BaseReplayRendererSelector {
private List<String> pathContains = null;
private List<String> queryContains = null;
private List<String> urlContainsAll = null;
@Override
public boolean canHandle(WaybackRequest wbRequest,
CaptureSearchResult result, Resource httpHeadersResource,
Resource payloadResource) {
if (isResourceTooBig(payloadResource)) {
return false;
}
try {
URL url = new URL(result.getOriginalUrl());
if(urlContainsAll != null) {
String path = url.toString();
for(String test : urlContainsAll) {
if(path.indexOf(test) == -1) {
return false;
}
}
return true;
}
if(pathContains != null) {
String path = url.getPath();
for(String test : pathContains) {
if(path.indexOf(test) != -1) {
return true;
}
}
}
if(queryContains != null) {
String query = url.getQuery();
if(query != null) {
for(String test : queryContains) {
if(query.indexOf(test) != -1) {
return true;
}
}
}
}
} catch (MalformedURLException e) {
// just eat it.
}
return false;
}
/**
* @return list of Strings, any of which being found in the path cause a
* match
*/
public List<String> getPathContains() {
return pathContains;
}
/**
* @param pathContains list of Strings, any of which being found in the
* path cause a match
*/
public void setPathContains(List<String> pathContains) {
this.pathContains = pathContains;
}
/**
* @return list of Strings, *ALL* of which must be found somewhere in the
* URL to cause a match
*/
public List<String> getUrlContainsAll() {
return urlContainsAll;
}
/**
* @param urlContainsAll list of Strings, *ALL* of which must be found
* somewhere in the URL to cause a match
*/
public void setUrlContainsAll(List<String> urlContainsAll) {
this.urlContainsAll = urlContainsAll;
}
/**
* @return list of Strings, any of which being found in the query cause a
* match
*/
public List<String> getQueryContains() {
return queryContains;
}
/**
* @param queryContains list of Strings, any of which being found in the
* query cause a match
*/
public void setQueryContains(List<String> queryContains) {
this.queryContains = queryContains;
}
}
| 1,276 |
335 | {
"word": "Interpretation",
"definitions": [
"The action of explaining the meaning of something.",
"An explanation or way of explaining.",
"A stylistic representation of a creative work or dramatic role."
],
"parts-of-speech": "Noun"
} | 99 |
2,251 | <gh_stars>1000+
// Copyright (c) .NET Foundation and Contributors (https://dotnetfoundation.org/ & https://stride3d.net) and Silicon Studio Corp. (https://www.siliconstudio.co.jp)
// Distributed under the MIT license. See the LICENSE.md file in the project root for more information.
#pragma once
#include <assimp/scene.h>
#define _AI_MATKEY_TEXTYPE_BASE "$tex.type"
#define _AI_MATKEY_TEXCOLOR_BASE "$tex.color"
#define _AI_MATKEY_TEXALPHA_BASE "$tex.alpha"
#define AI_MATKEY_TEXTYPE(type, N) _AI_MATKEY_TEXTYPE_BASE,type,N
#define AI_MATKEY_TEXCOLOR(type,N) _AI_MATKEY_TEXCOLOR_BASE,type,N
#define AI_MATKEY_TEXALPHA(type,N) _AI_MATKEY_TEXALPHA_BASE,type,N
/// <summary>
/// Enumeration of the different types of node in the new Assimp's material stack.
/// Don't forget to update the dictionary in Materials.cpp when modifying this enum.
/// </summary>
enum aiStackType {
aiStackType_ColorType,
aiStackType_TextureType,
aiStackType_BlendOpType,
aiStackType_NumberTypes
};
/// <summary>
/// Enumeration of the new Assimp's flags.
/// </summary>
enum aiStackFlags {
aiStackFlags_Invert = 1,
aiStackFlags_ReplaceAlpha = 2
};
#define aiStackFlags_NumbeFlags 2
/// <summary>
/// Enumeration of the different operations in the new Assimp's material stack.
/// Don't forget to update the dictionary in Materials.cpp when modifying this enum.
/// </summary>
enum aiStackOperation {
aiStackOperation_Add = 0,
aiStackOperation_Add3ds,
aiStackOperation_AddMaya,
aiStackOperation_Average,
aiStackOperation_Color,
aiStackOperation_ColorBurn,
aiStackOperation_ColorDodge,
aiStackOperation_Darken3ds,
aiStackOperation_DarkenMaya,
aiStackOperation_Desaturate,
aiStackOperation_Difference3ds,
aiStackOperation_DifferenceMaya,
aiStackOperation_Divide,
aiStackOperation_Exclusion,
aiStackOperation_HardLight,
aiStackOperation_HardMix,
aiStackOperation_Hue,
aiStackOperation_Illuminate,
aiStackOperation_In,
aiStackOperation_Lighten3ds,
aiStackOperation_LightenMaya,
aiStackOperation_LinearBurn,
aiStackOperation_LinearDodge,
aiStackOperation_Multiply3ds,
aiStackOperation_MultiplyMaya,
aiStackOperation_None,
aiStackOperation_Out,
aiStackOperation_Over3ds,
aiStackOperation_Overlay3ds,
aiStackOperation_OverMaya,
aiStackOperation_PinLight,
aiStackOperation_Saturate,
aiStackOperation_Saturation,
aiStackOperation_Screen,
aiStackOperation_SoftLight,
aiStackOperation_Substract3ds,
aiStackOperation_SubstractMaya,
aiStackOperation_Value,
aiStackOperation_Mask,
aiStackOperation_Unknown,
aiStackOperation_NumberOperations
};
| 880 |
3,603 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.server.security;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.testng.annotations.Test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping;
import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults;
import static io.airlift.configuration.testing.ConfigAssertions.recordDefaults;
public class TestHeaderAuthenticatorConfig
{
@Test
public void testDefaults()
{
assertRecordedDefaults(recordDefaults(HeaderAuthenticatorConfig.class)
.setHeaderAuthenticatorFiles(ImmutableList.of("etc/header-authenticator.properties"))
.setUserMappingPattern(null)
.setUserMappingFile(null));
}
@Test
public void testExplicitPropertyMappings()
throws IOException
{
Path userMappingFile = Files.createTempFile(null, null);
Path config1 = Files.createTempFile(null, null);
Path config2 = Files.createTempFile(null, null);
Map<String, String> properties = new ImmutableMap.Builder<String, String>()
.put("http-server.authentication.header.user-mapping.pattern", "(.*)@something")
.put("http-server.authentication.header.user-mapping.file", userMappingFile.toString())
.put("header-authenticator.config-files", config1.toString() + "," + config2.toString())
.build();
HeaderAuthenticatorConfig expected = new HeaderAuthenticatorConfig()
.setHeaderAuthenticatorFiles(ImmutableList.of(config1.toAbsolutePath().toString(), config2.toAbsolutePath().toString()))
.setUserMappingPattern("(.*)@something")
.setUserMappingFile(userMappingFile.toFile());
assertFullMapping(properties, expected);
}
}
| 894 |
809 | #ifndef E2K_LIMITS_H_
#define E2K_LIMITS_H_
#include <asm-generic/limits32.h>
#endif /* E2K_LIMITS_H_ */
| 58 |
631 | package app.controllers;
import org.javalite.activeweb.AppIntegrationSpec;
import org.junit.Test;
public class SessionFacadeSpec extends AppIntegrationSpec {
@Test
public void shouldRemoveSessionAttributes(){
controller("session").get("add-to-session");
the(session().get("greeting")).shouldNotBeNull();
the(session().get("dumb-object")).shouldNotBeNull();
the(session().size()).shouldBeEqual(2);
controller("session").get("remove-from-session");
the(session().get("greeting")).shouldBeNull();
the(session().get("dumb-object")).shouldBeNull();
the(session().size()).shouldBeEqual(0);
the(responseContent()).shouldContain("app.controllers.SessionController$Dumb");
}
@Test
public void shouldSessionNotCreated() {
controller("session").get("remove_from_session");
a("not found".equals(responseContent())).shouldBeTrue();
a(statusCode()).shouldBeEqual(404);
a(session().exists()).shouldBeFalse();
}
}
| 389 |
573 | /*
* This software is licensed under the terms of the MIT License.
* See COPYING for further information.
* ---
* Copyright (c) 2011-2019, <NAME> <<EMAIL>>.
* Copyright (c) 2012-2019, <NAME> <<EMAIL>>.
*/
#pragma once
#include "taisei.h"
#include <time.h>
#ifdef TAISEI_BUILDCONF_HAVE_TIMESPEC
typedef struct timespec SystemTime;
#else
typedef struct SystemTime {
time_t tv_sec;
long tv_nsec;
} SystemTime;
#endif
void get_system_time(SystemTime *time) attr_nonnull(1);
| 183 |
1,056 | <reponame>Antholoj/netbeans
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.j2ee.ddloaders.common;
import org.openide.cookies.SaveCookie;
import org.openide.filesystems.FileObject;
import org.openide.loaders.MultiFileLoader;
import org.openide.util.RequestProcessor;
import org.netbeans.modules.j2ee.ddloaders.common.xmlutils.XMLJ2eeDataObject;
import org.netbeans.modules.j2ee.ddloaders.common.xmlutils.XMLJ2eeUtils;
import javax.swing.text.Document;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.SwingUtilities;
import org.openide.filesystems.MIMEResolver;
/** Represents a DD2beansDataObject in the Repository.
*
* @author mkuchtiak
*/
@MIMEResolver.Registration(
displayName="org.netbeans.modules.j2ee.ddloaders.Bundle#DDLoadersResolver",
position=340,
resource="../resources/dd-loaders-mime-resolver.xml"
)
public abstract class DD2beansDataObject extends XMLJ2eeDataObject implements org.openide.nodes.CookieSet.Factory{
private static final int DELAY_FOR_TIMER=200;
/** Private request processor for parsing and text generating tasks */
protected final static RequestProcessor RP = new RequestProcessor("XML Parsing"); //NOI18N
private final RequestProcessor.Task generationTask;
// constructor settings
private String prefixMark;
private static final long serialVersionUID = -5363900668319174348L;
public DD2beansDataObject(FileObject pf, MultiFileLoader loader)
throws org.openide.loaders.DataObjectExistsException {
this (pf, loader,true);
}
public DD2beansDataObject(FileObject pf, MultiFileLoader loader, final boolean saveAfterNodeChanges)
throws org.openide.loaders.DataObjectExistsException {
super (pf, loader);
generationTask = RP.create(new Runnable() {
int numberOfStartedGens;
public void run() {
numberOfStartedGens++;
final String newDoc = generateDocument();
SwingUtilities.invokeLater(new Runnable() {
public void run() {
try {
Document doc = getEditorSupport().openDocument();
XMLJ2eeUtils.replaceDocument(doc, newDoc, prefixMark);
setDocumentValid(true);
if (saveAfterNodeChanges) {
SaveCookie savec = (SaveCookie) getCookie(SaveCookie.class);
if (savec != null) {
savec.save();
}
}
// this is necessary for correct undo behaviour
getEditorSupport().getUndo().discardAllEdits();
} catch (javax.swing.text.BadLocationException e) {
Logger.getLogger("global").log(Level.INFO, null, e);
} catch (IOException e) {
Logger.getLogger("global").log(Level.INFO, null, e);
} finally {
synchronized (generationTask) {
numberOfStartedGens--;
if (numberOfStartedGens == 0) {
nodeDirty = false;
}
}
}
}
});
}
});
}
    /** Create document from the Node. This method is called after the Node (Node properties) is changed.
     * The document is generated from the data module (isDocumentGenerable=true)
*/
protected abstract String generateDocument();
    /** Setter for prefixMark. The prefix mark identifies the leading part of the xml document that should be preserved
     * when the document is replaced by the newly generated one (mainly used to preserve comments at the beginning).
* @param prefix prefixMark
*/
protected final void setPrefixMark(String prefix) {
this.prefixMark=prefix;
}
    /** Getter for prefixMark
* @return prefixMark
*/
protected final String getPrefixMark() {
return prefixMark;
}
/** Setter for property nodeDirty.
* @param dirty New value of property nodeDirty.
*/
@Override
public void setNodeDirty(boolean dirty){
//System.out.println("setNodeDirty("+dirty+")");
if (dirty) {
synchronized (this) {
nodeDirty=true;
restartGen();
}
}
}
public RequestProcessor.Task getGenerationTask(){
return generationTask;
}
protected void restartGen() {
generationTask.schedule(DELAY_FOR_TIMER);
}
}
| 2,382 |
11,719 | <filename>local_addons/ofxFaceTracker2/libs/dlib/include/dlib/reference_counter.h
// Copyright (C) 2003 <NAME> (<EMAIL>)
// License: Boost Software License See LICENSE.txt for the full license.
#ifndef DLIB_REFERENCE_COUNTEr_
#define DLIB_REFERENCE_COUNTEr_
#include "reference_counter/reference_counter_kernel_1.h"
#include "algs.h"
namespace dlib
{
template <
typename T,
typename copy = copy_functor<T>
>
class reference_counter
{
reference_counter() {}
public:
//----------- kernels ---------------
// kernel_1a
typedef reference_counter_kernel_1<T,copy>
kernel_1a;
};
}
#endif // DLIB_REFERENCE_COUNTEr_
| 329 |
1,538 | <filename>musicdemo/src/main/java/com/spinytech/musicdemo/StopAction.java
package com.spinytech.musicdemo;
import android.content.Context;
import android.content.Intent;
import com.spinytech.macore.MaAction;
import com.spinytech.macore.MaActionResult;
import com.spinytech.macore.tools.Logger;
import java.util.HashMap;
/**
* Created by wanglei on 2016/12/28.
*/
public class StopAction extends MaAction {
@Override
public boolean isAsync(Context context, HashMap<String, String> requestData) {
return false;
}
@Override
public MaActionResult invoke(Context context, HashMap<String, String> requestData) {
Intent intent = new Intent(context, MusicService.class);
intent.putExtra("command", "stop");
context.startService(intent);
MaActionResult result = new MaActionResult.Builder()
.code(MaActionResult.CODE_SUCCESS)
.msg("stop success")
.data("")
.object(null)
.build();
Logger.d("StopAction", "\nStopAction end: " + System.currentTimeMillis());
return result;
}
}
| 453 |
841 | package com.ctrip.framework.apollo.use.cases.dynamic.datasource.ds;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.concurrent.atomic.AtomicReference;
import javax.sql.DataSource;
/**
* A sample refreshable data source
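 * <p>
 * Hedged usage sketch: the helper methods shown here are assumptions, only
 * {@link #setDataSource(DataSource)} comes from this class:
 * <pre>
 *   // on a configuration change, build a new pool and swap it in atomically
 *   DataSource newPool = buildDataSourceFromLatestConfig(); // hypothetical helper
 *   DataSource oldPool = dynamicDataSource.setDataSource(newPool);
 *   closeQuietly(oldPool);                                  // hypothetical helper
 * </pre>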
*/
public class DynamicDataSource implements DataSource {
private final AtomicReference<DataSource> dataSourceAtomicReference;
public DynamicDataSource(DataSource dataSource) {
dataSourceAtomicReference = new AtomicReference<>(dataSource);
}
/**
* set the new data source and return the previous one
*/
public DataSource setDataSource(DataSource newDataSource){
return dataSourceAtomicReference.getAndSet(newDataSource);
}
@Override
public Connection getConnection() throws SQLException {
return dataSourceAtomicReference.get().getConnection();
}
@Override
public Connection getConnection(String username, String password) throws SQLException {
return dataSourceAtomicReference.get().getConnection(username, password);
}
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
return dataSourceAtomicReference.get().unwrap(iface);
}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
return dataSourceAtomicReference.get().isWrapperFor(iface);
}
@Override
public PrintWriter getLogWriter() throws SQLException {
return dataSourceAtomicReference.get().getLogWriter();
}
@Override
public void setLogWriter(PrintWriter out) throws SQLException {
dataSourceAtomicReference.get().setLogWriter(out);
}
@Override
public void setLoginTimeout(int seconds) throws SQLException {
dataSourceAtomicReference.get().setLoginTimeout(seconds);
}
@Override
public int getLoginTimeout() throws SQLException {
return dataSourceAtomicReference.get().getLoginTimeout();
}
@Override
public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
return dataSourceAtomicReference.get().getParentLogger();
}
}
| 617 |
575 | <gh_stars>100-1000
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "weblayer/browser/weblayer_browser_interface_binders.h"
#include "base/bind.h"
#include "build/build_config.h"
#include "components/no_state_prefetch/browser/no_state_prefetch_contents.h"
#include "components/no_state_prefetch/browser/no_state_prefetch_processor_impl.h"
#include "components/no_state_prefetch/common/prerender_canceler.mojom.h"
#include "content/public/browser/browser_context.h"
#include "content/public/browser/render_frame_host.h"
#include "content/public/browser/web_contents.h"
#include "content/public/browser/web_ui.h"
#include "content/public/browser/web_ui_controller.h"
#include "third_party/blink/public/mojom/payments/payment_request.mojom.h"
#include "third_party/blink/public/mojom/prerender/prerender.mojom.h"
#include "weblayer/browser/no_state_prefetch/no_state_prefetch_processor_impl_delegate_impl.h"
#include "weblayer/browser/no_state_prefetch/prerender_utils.h"
#include "weblayer/browser/translate_client_impl.h"
#include "weblayer/browser/webui/weblayer_internals.mojom.h"
#include "weblayer/browser/webui/weblayer_internals_ui.h"
#if defined(OS_ANDROID)
#include "mojo/public/cpp/bindings/self_owned_receiver.h"
#include "services/service_manager/public/cpp/interface_provider.h"
#include "third_party/blink/public/mojom/installedapp/installed_app_provider.mojom.h"
#include "third_party/blink/public/mojom/webshare/webshare.mojom.h"
#endif
namespace weblayer {
namespace {
void BindContentTranslateDriver(
content::RenderFrameHost* host,
mojo::PendingReceiver<translate::mojom::ContentTranslateDriver> receiver) {
// Translation does not currently work in subframes.
// TODO(crbug.com/1073370): Transition WebLayer to per-frame translation
// architecture once it's ready.
if (host->GetParent())
return;
auto* contents = content::WebContents::FromRenderFrameHost(host);
if (!contents)
return;
TranslateClientImpl* const translate_client =
TranslateClientImpl::FromWebContents(contents);
translate_client->translate_driver()->AddReceiver(std::move(receiver));
}
void BindPageHandler(
content::RenderFrameHost* host,
mojo::PendingReceiver<weblayer_internals::mojom::PageHandler> receiver) {
auto* contents = content::WebContents::FromRenderFrameHost(host);
if (!contents)
return;
content::WebUI* web_ui = contents->GetWebUI();
// Performs a safe downcast to the concrete WebUIController subclass.
WebLayerInternalsUI* concrete_controller =
web_ui ? web_ui->GetController()->GetAs<WebLayerInternalsUI>() : nullptr;
// This is expected to be called only for main frames and for the right
// WebUI pages matching the same WebUI associated to the RenderFrameHost.
if (host->GetParent() || !concrete_controller)
return;
concrete_controller->BindInterface(std::move(receiver));
}
void BindNoStatePrefetchProcessor(
content::RenderFrameHost* frame_host,
mojo::PendingReceiver<blink::mojom::NoStatePrefetchProcessor> receiver) {
prerender::NoStatePrefetchProcessorImpl::Create(
frame_host, std::move(receiver),
std::make_unique<NoStatePrefetchProcessorImplDelegateImpl>());
}
void BindPrerenderCanceler(
content::RenderFrameHost* frame_host,
mojo::PendingReceiver<prerender::mojom::PrerenderCanceler> receiver) {
auto* web_contents = content::WebContents::FromRenderFrameHost(frame_host);
if (!web_contents)
return;
auto* no_state_prefetch_contents =
NoStatePrefetchContentsFromWebContents(web_contents);
if (!no_state_prefetch_contents)
return;
no_state_prefetch_contents->AddPrerenderCancelerReceiver(std::move(receiver));
}
#if defined(OS_ANDROID)
template <typename Interface>
void ForwardToJavaWebContents(content::RenderFrameHost* frame_host,
mojo::PendingReceiver<Interface> receiver) {
content::WebContents* contents =
content::WebContents::FromRenderFrameHost(frame_host);
if (contents)
contents->GetJavaInterfaces()->GetInterface(std::move(receiver));
}
template <typename Interface>
void ForwardToJavaFrame(content::RenderFrameHost* render_frame_host,
mojo::PendingReceiver<Interface> receiver) {
render_frame_host->GetJavaInterfaces()->GetInterface(std::move(receiver));
}
#endif
} // namespace
void PopulateWebLayerFrameBinders(
content::RenderFrameHost* render_frame_host,
mojo::BinderMapWithContext<content::RenderFrameHost*>* map) {
map->Add<weblayer_internals::mojom::PageHandler>(
base::BindRepeating(&BindPageHandler));
map->Add<translate::mojom::ContentTranslateDriver>(
base::BindRepeating(&BindContentTranslateDriver));
map->Add<blink::mojom::NoStatePrefetchProcessor>(
base::BindRepeating(&BindNoStatePrefetchProcessor));
map->Add<prerender::mojom::PrerenderCanceler>(
base::BindRepeating(&BindPrerenderCanceler));
#if defined(OS_ANDROID)
map->Add<blink::mojom::InstalledAppProvider>(base::BindRepeating(
&ForwardToJavaFrame<blink::mojom::InstalledAppProvider>));
map->Add<blink::mojom::ShareService>(base::BindRepeating(
&ForwardToJavaWebContents<blink::mojom::ShareService>));
map->Add<payments::mojom::PaymentRequest>(base::BindRepeating(
&ForwardToJavaFrame<payments::mojom::PaymentRequest>));
#endif
}
} // namespace weblayer
| 1,924 |
848 | <reponame>abhaikollara/tensorflow<filename>tensorflow/lite/kernels/squared_difference_test.cc
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <gtest/gtest.h>
#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/kernels/register.h"
#include "tensorflow/lite/kernels/test_util.h"
#include "tensorflow/lite/model.h"
namespace tflite {
namespace {
using ::testing::ElementsAreArray;
class BaseSquaredDifferenceOpModel : public SingleOpModel {
public:
BaseSquaredDifferenceOpModel(const TensorData& input1,
const TensorData& input2,
const TensorData& output) {
input1_ = AddInput(input1);
input2_ = AddInput(input2);
output_ = AddOutput(output);
SetBuiltinOp(BuiltinOperator_SQUARED_DIFFERENCE,
BuiltinOptions_SquaredDifferenceOptions,
CreateSquaredDifferenceOptions(builder_).Union());
BuildInterpreter({GetShape(input1_), GetShape(input2_)});
}
int input1() { return input1_; }
int input2() { return input2_; }
protected:
int input1_;
int input2_;
int output_;
};
class FloatSquaredDifferenceOpModel : public BaseSquaredDifferenceOpModel {
public:
using BaseSquaredDifferenceOpModel::BaseSquaredDifferenceOpModel;
std::vector<float> GetOutput() { return ExtractVector<float>(output_); }
};
class IntegerSquaredDifferenceOpModel : public BaseSquaredDifferenceOpModel {
public:
using BaseSquaredDifferenceOpModel::BaseSquaredDifferenceOpModel;
std::vector<int32_t> GetOutput() { return ExtractVector<int32_t>(output_); }
};
TEST(FloatSquaredDifferenceOpTest, FloatType_SameShape) {
FloatSquaredDifferenceOpModel m({TensorType_FLOAT32, {1, 2, 2, 1}},
{TensorType_FLOAT32, {1, 2, 2, 1}},
{TensorType_FLOAT32, {}});
m.PopulateTensor<float>(m.input1(), {-0.2, 0.2, -1.2, 0.8});
m.PopulateTensor<float>(m.input2(), {0.5, 0.2, -1.5, 0.5});
m.Invoke();
EXPECT_THAT(m.GetOutput(),
ElementsAreArray(ArrayFloatNear({0.49, 0.0, 0.09, 0.09})));
}
TEST(FloatSquaredDifferenceOpTest, FloatType_VariousInputShapes) {
std::vector<std::vector<int>> test_shapes = {
{6}, {2, 3}, {2, 1, 3}, {1, 3, 1, 2}};
for (int i = 0; i < test_shapes.size(); ++i) {
FloatSquaredDifferenceOpModel m({TensorType_FLOAT32, test_shapes[i]},
{TensorType_FLOAT32, test_shapes[i]},
{TensorType_FLOAT32, {}});
m.PopulateTensor<float>(m.input1(), {-2.0, 0.2, 0.3, 0.8, 1.1, -2.0});
m.PopulateTensor<float>(m.input2(), {1.0, 0.2, 0.6, 0.4, -1.0, -0.0});
m.Invoke();
EXPECT_THAT(
m.GetOutput(),
ElementsAreArray(ArrayFloatNear({9.0, 0.0, 0.09, 0.16, 4.41, 4.0})))
<< "With shape number " << i;
}
}
TEST(FloatSquaredDifferenceOpTest, FloatType_WithBroadcast) {
std::vector<std::vector<int>> test_shapes = {
{6}, {2, 3}, {2, 1, 3}, {1, 3, 1, 2}};
for (int i = 0; i < test_shapes.size(); ++i) {
FloatSquaredDifferenceOpModel m(
{TensorType_FLOAT32, test_shapes[i]},
{TensorType_FLOAT32, {}}, // always a scalar
{TensorType_FLOAT32, {}});
m.PopulateTensor<float>(m.input1(), {-0.2, 0.2, 0.5, 0.8, 0.11, 1.1});
m.PopulateTensor<float>(m.input2(), {0.1});
m.Invoke();
EXPECT_THAT(
m.GetOutput(),
ElementsAreArray(ArrayFloatNear({0.09, 0.01, 0.16, 0.49, 0.0001, 1.0})))
<< "With shape number " << i;
}
}
TEST(IntegerSquaredDifferenceOpTest, IntegerType_SameShape) {
IntegerSquaredDifferenceOpModel m({TensorType_INT32, {1, 2, 2, 1}},
{TensorType_INT32, {1, 2, 2, 1}},
{TensorType_INT32, {}});
m.PopulateTensor<int32_t>(m.input1(), {-2, 2, -15, 8});
m.PopulateTensor<int32_t>(m.input2(), {5, -2, -3, 5});
m.Invoke();
EXPECT_THAT(m.GetOutput(), ElementsAreArray({49, 16, 144, 9}));
}
TEST(IntegerSquaredDifferenceOpTest, IntegerType_VariousInputShapes) {
std::vector<std::vector<int>> test_shapes = {
{6}, {2, 3}, {2, 1, 3}, {1, 3, 1, 2}};
for (int i = 0; i < test_shapes.size(); ++i) {
IntegerSquaredDifferenceOpModel m({TensorType_INT32, test_shapes[i]},
{TensorType_INT32, test_shapes[i]},
{TensorType_INT32, {}});
m.PopulateTensor<int32_t>(m.input1(), {-20, 2, 3, 8, 11, -20});
m.PopulateTensor<int32_t>(m.input2(), {1, 2, 6, 5, -5, -20});
m.Invoke();
EXPECT_THAT(m.GetOutput(), ElementsAreArray({441, 0, 9, 9, 256, 0}))
<< "With shape number " << i;
}
}
TEST(IntegerSquaredDifferenceOpTest, IntegerType_WithBroadcast) {
std::vector<std::vector<int>> test_shapes = {
{6}, {2, 3}, {2, 1, 3}, {1, 3, 1, 2}};
for (int i = 0; i < test_shapes.size(); ++i) {
IntegerSquaredDifferenceOpModel m(
{TensorType_INT32, test_shapes[i]},
{TensorType_INT32, {}}, // always a scalar
{TensorType_INT32, {}});
m.PopulateTensor<int32_t>(m.input1(), {-20, 10, 7, 3, 1, 13});
m.PopulateTensor<int32_t>(m.input2(), {3});
m.Invoke();
EXPECT_THAT(m.GetOutput(), ElementsAreArray({529, 49, 16, 0, 4, 100}))
<< "With shape number " << i;
}
}
} // namespace
} // namespace tflite
| 2,684 |
2,686 | <filename>awaitility/src/main/java/org/awaitility/constraint/AtMostWaitConstraint.java
package org.awaitility.constraint;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
public class AtMostWaitConstraint implements WaitConstraint {
private final Duration atMostDuration;
public static final AtMostWaitConstraint FOREVER = new AtMostWaitConstraint(ChronoUnit.FOREVER.getDuration());
public static final AtMostWaitConstraint TEN_SECONDS = new AtMostWaitConstraint(Duration.ofSeconds(10));
AtMostWaitConstraint(Duration atMostDuration) {
this.atMostDuration = atMostDuration;
}
public Duration getMaxWaitTime() {
return atMostDuration;
}
public Duration getMinWaitTime() {
return Duration.ZERO;
}
public Duration getHoldPredicateTime() {
return Duration.ZERO;
}
public WaitConstraint withMinWaitTime(Duration minWaitTime) {
return new IntervalWaitConstraint(minWaitTime, atMostDuration);
}
public WaitConstraint withMaxWaitTime(Duration maxWaitTime) {
return new AtMostWaitConstraint(maxWaitTime);
}
public WaitConstraint withHoldPredicateTime(Duration holdConditionTime) {
return new HoldsPredicateWaitConstraint(getMinWaitTime(), atMostDuration, holdConditionTime);
}
}
| 455 |
365 | <reponame>MarkC-b3d/blender-mc3d<gh_stars>100-1000
/*
* Copyright 2011-2013 Blender Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
CCL_NAMESPACE_BEGIN
/* Attribute Node */
ccl_device AttributeDescriptor svm_node_attr_init(
KernelGlobals *kg, ShaderData *sd, uint4 node, NodeAttributeOutputType *type, uint *out_offset)
{
*out_offset = node.z;
*type = (NodeAttributeOutputType)node.w;
AttributeDescriptor desc;
if (sd->object != OBJECT_NONE) {
desc = find_attribute(kg, sd, node.y);
if (desc.offset == ATTR_STD_NOT_FOUND) {
desc = attribute_not_found();
desc.offset = 0;
desc.type = (NodeAttributeType)node.w;
}
}
else {
/* background */
desc = attribute_not_found();
desc.offset = 0;
desc.type = (NodeAttributeType)node.w;
}
return desc;
}
ccl_device void svm_node_attr(KernelGlobals *kg, ShaderData *sd, float *stack, uint4 node)
{
NodeAttributeOutputType type = NODE_ATTR_OUTPUT_FLOAT;
uint out_offset = 0;
AttributeDescriptor desc = svm_node_attr_init(kg, sd, node, &type, &out_offset);
#ifdef __VOLUME__
/* Volumes
* NOTE: moving this into its own node type might help improve performance. */
if (primitive_is_volume_attribute(sd, desc)) {
const float4 value = volume_attribute_float4(kg, sd, desc);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
const float f = volume_attribute_value_to_float(value);
stack_store_float(stack, out_offset, f);
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
const float3 f = volume_attribute_value_to_float3(value);
stack_store_float3(stack, out_offset, f);
}
else {
const float f = volume_attribute_value_to_alpha(value);
stack_store_float(stack, out_offset, f);
}
return;
}
#endif
/* Surface. */
if (desc.type == NODE_ATTR_FLOAT) {
float f = primitive_surface_attribute_float(kg, sd, desc, NULL, NULL);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, f);
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, make_float3(f, f, f));
}
else {
stack_store_float(stack, out_offset, 1.0f);
}
}
else if (desc.type == NODE_ATTR_FLOAT2) {
float2 f = primitive_surface_attribute_float2(kg, sd, desc, NULL, NULL);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, f.x);
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, make_float3(f.x, f.y, 0.0f));
}
else {
stack_store_float(stack, out_offset, 1.0f);
}
}
else if (desc.type == NODE_ATTR_FLOAT4 || desc.type == NODE_ATTR_RGBA) {
float4 f = primitive_surface_attribute_float4(kg, sd, desc, NULL, NULL);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, average(float4_to_float3(f)));
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, float4_to_float3(f));
}
else {
stack_store_float(stack, out_offset, f.w);
}
}
else {
float3 f = primitive_surface_attribute_float3(kg, sd, desc, NULL, NULL);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, average(f));
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, f);
}
else {
stack_store_float(stack, out_offset, 1.0f);
}
}
}
ccl_device void svm_node_attr_bump_dx(KernelGlobals *kg, ShaderData *sd, float *stack, uint4 node)
{
NodeAttributeOutputType type = NODE_ATTR_OUTPUT_FLOAT;
uint out_offset = 0;
AttributeDescriptor desc = svm_node_attr_init(kg, sd, node, &type, &out_offset);
#ifdef __VOLUME__
/* Volume */
if (primitive_is_volume_attribute(sd, desc)) {
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, 0.0f);
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, make_float3(0.0f, 0.0f, 0.0f));
}
else {
stack_store_float(stack, out_offset, 1.0f);
}
return;
}
#endif
/* Surface */
if (desc.type == NODE_ATTR_FLOAT) {
float dx;
float f = primitive_surface_attribute_float(kg, sd, desc, &dx, NULL);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, f + dx);
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, make_float3(f + dx, f + dx, f + dx));
}
else {
stack_store_float(stack, out_offset, 1.0f);
}
}
else if (desc.type == NODE_ATTR_FLOAT2) {
float2 dx;
float2 f = primitive_surface_attribute_float2(kg, sd, desc, &dx, NULL);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, f.x + dx.x);
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, make_float3(f.x + dx.x, f.y + dx.y, 0.0f));
}
else {
stack_store_float(stack, out_offset, 1.0f);
}
}
else if (desc.type == NODE_ATTR_FLOAT4 || desc.type == NODE_ATTR_RGBA) {
float4 dx;
float4 f = primitive_surface_attribute_float4(kg, sd, desc, &dx, NULL);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, average(float4_to_float3(f + dx)));
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, float4_to_float3(f + dx));
}
else {
stack_store_float(stack, out_offset, f.w + dx.w);
}
}
else {
float3 dx;
float3 f = primitive_surface_attribute_float3(kg, sd, desc, &dx, NULL);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, average(f + dx));
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, f + dx);
}
else {
stack_store_float(stack, out_offset, 1.0f);
}
}
}
ccl_device void svm_node_attr_bump_dy(KernelGlobals *kg, ShaderData *sd, float *stack, uint4 node)
{
NodeAttributeOutputType type = NODE_ATTR_OUTPUT_FLOAT;
uint out_offset = 0;
AttributeDescriptor desc = svm_node_attr_init(kg, sd, node, &type, &out_offset);
#ifdef __VOLUME__
/* Volume */
if (primitive_is_volume_attribute(sd, desc)) {
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, 0.0f);
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, make_float3(0.0f, 0.0f, 0.0f));
}
else {
stack_store_float(stack, out_offset, 1.0f);
}
return;
}
#endif
/* Surface */
if (desc.type == NODE_ATTR_FLOAT) {
float dy;
float f = primitive_surface_attribute_float(kg, sd, desc, NULL, &dy);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, f + dy);
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, make_float3(f + dy, f + dy, f + dy));
}
else {
stack_store_float(stack, out_offset, 1.0f);
}
}
else if (desc.type == NODE_ATTR_FLOAT2) {
float2 dy;
float2 f = primitive_surface_attribute_float2(kg, sd, desc, NULL, &dy);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, f.x + dy.x);
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, make_float3(f.x + dy.x, f.y + dy.y, 0.0f));
}
else {
stack_store_float(stack, out_offset, 1.0f);
}
}
else if (desc.type == NODE_ATTR_FLOAT4 || desc.type == NODE_ATTR_RGBA) {
float4 dy;
float4 f = primitive_surface_attribute_float4(kg, sd, desc, NULL, &dy);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, average(float4_to_float3(f + dy)));
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, float4_to_float3(f + dy));
}
else {
stack_store_float(stack, out_offset, f.w + dy.w);
}
}
else {
float3 dy;
float3 f = primitive_surface_attribute_float3(kg, sd, desc, NULL, &dy);
if (type == NODE_ATTR_OUTPUT_FLOAT) {
stack_store_float(stack, out_offset, average(f + dy));
}
else if (type == NODE_ATTR_OUTPUT_FLOAT3) {
stack_store_float3(stack, out_offset, f + dy);
}
else {
stack_store_float(stack, out_offset, 1.0f);
}
}
}
CCL_NAMESPACE_END
| 3,939 |
852 | <filename>RecoTracker/TrackProducer/test/TrackRefitOld.py<gh_stars>100-1000
import FWCore.ParameterSet.Config as cms
process = cms.Process("Refitting")
### standard MessageLoggerConfiguration
process.load("FWCore.MessageService.MessageLogger_cfi")
### Standard Configurations
process.load("Configuration.StandardSequences.Services_cff")
process.load('Configuration/StandardSequences/GeometryIdeal_cff')
process.load('Configuration/StandardSequences/Reconstruction_cff')
process.load('Configuration/StandardSequences/MagneticField_AutoFromDBCurrent_cff')
## Fitter-smoother: loosen outlier rejection as for first data-taking with LHC "collisions"
process.KFFittingSmootherWithOutliersRejectionAndRK.BreakTrajWith2ConsecutiveMissing = False
process.KFFittingSmootherWithOutliersRejectionAndRK.EstimateCut = 1000
### Conditions
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
#process.GlobalTag.globaltag = "IDEAL_V5::All"
process.GlobalTag.globaltag = 'GR09_P_V6::All'
### Track refitter specific stuff
process.load("RecoTracker.TrackProducer.TrackRefitters_cff") #the correct one
#process.load("RecoTracker.TrackProducer.RefitterWithMaterial_cff") #the one for backward compatibility
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1)
)
process.source = cms.Source("PoolSource",
### tracks from collisions
fileNames = cms.untracked.vstring(
'rfio:/castor/cern.ch/user/c/chiochia/09_beam_commissioning/BSCskim_123151_Express.root')
#'/store/relval/CMSSW_2_1_10/RelValTTbar/GEN-SIM-DIGI-RAW-HLTDEBUG-RECO/IDEAL_V9_v1/0001/1E04FC31-F99A-DD11-94EE-0018F3D096DE.root')
### tracks from cosmics
# fileNames = cms.untracked.vstring(
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/005F51E5-0373-DD11-B6FA-001731AF6B7D.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/005F51E5-0373-DD11-B6FA-001731AF6B7D.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/006F3A6A-0373-DD11-A8E7-00304876A0FF.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/02CF5B1E-6476-DD11-A034-003048769E65.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/02DF31C3-A775-DD11-91C2-001A92971BB8.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/02F71F56-CE74-DD11-9DD0-001A92810AE4.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/0446C89C-E072-DD11-A341-0018F3D0960C.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/04750FC3-3E73-DD11-B054-00304876A147.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/04DFD531-0473-DD11-964E-0018F3D096AE.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/067111FB-3873-DD11-AD86-00304875A9C5.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/067982F4-E175-DD11-99F7-001731AF6AC5.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/0680EB9B-4F73-DD11-83F8-0018F3D0962E.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/06BF1AF3-E175-DD11-B467-00304876A147.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/0A3843F3-E175-DD11-8419-003048767EE7.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/0A5AAABA-3973-DD11-B949-003048767FA1.root',
# '/store/data/CRUZET4_v1/Cosmics/RECO/CRZT210_V1_SuperPointing_v1/0000/0A911B18-0273-DD11-A5A6-001731A283E1.root')
### tracks from beam halo muons
)
process.TRACKS = cms.OutputModule("PoolOutputModule",
outputCommands = cms.untracked.vstring('drop *_*_*_*',
'keep recoTracks_*_*_*',
'keep recoTrackExtras_*_*_*',
'keep TrackingRecHitsOwned_*_*_*'),
fileName = cms.untracked.string('refitting.root')
)
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) )
process.p1 = cms.Path(process.TrackRefitter
#process.TrackRefitterP5
#process.TrackRefitterBHM
)
process.outpath = cms.EndPath(process.TRACKS)
| 2,409 |
1,511 | <filename>tests/cluecode/data/ics/svox-pico-lib/picofftsg.c
/*
* Copyright (C) 2008-2009 SVOX AG, Baslerstr. 30, 8048 Zuerich, Switzerland
*
* Licensed under the Apache License, Version 2.0 (the "License");
*
* FFT/DCT related data types, constants and functions in Pico
*
* Copyright (C) 2008-2009 SVOX AG, Baslerstr. 30, 8048 Zuerich, Switzerland
* All rights reserved.
*
* @addtogroup picofft
* ---------------------------------------------------\n
* <b> Fast Fourier/Cosine/Sine Transform </b>\n
* Adapted from http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html (Copyright Takuya OOURA, 1996-2001)\n
* ---------------------------------------------------\n
| 218 |
711 | <gh_stars>100-1000
package com.java110.front.components.ownerRepair;
import com.java110.core.context.IPageData;
import com.java110.front.smo.IRoomServiceSMO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
/**
 * Owner repair component management class
* <p>
* add by wuxw
* <p>
* 2019-06-29
*/
@Component("ownerRepairDetail")
public class OwnerRepairDetailComponent {
@Autowired
private IRoomServiceSMO roomServiceSMOImpl;
public ResponseEntity<String> getRoom(IPageData pd) {
return roomServiceSMOImpl.listRoom(pd);
}
public IRoomServiceSMO getRoomServiceSMOImpl() {
return roomServiceSMOImpl;
}
public void setRoomServiceSMOImpl(IRoomServiceSMO roomServiceSMOImpl) {
this.roomServiceSMOImpl = roomServiceSMOImpl;
}
}
| 340 |
589 | package rocks.inspectit.shared.all.communication;
import rocks.inspectit.shared.all.cmr.cache.IObjectSizes;
/**
* Enumeration for the exception events.
*
* @author <NAME>
*
*/
public enum ExceptionEvent implements Sizeable {
CREATED, RETHROWN, PASSED, HANDLED, UNREGISTERED_PASSED; // NOCHK
/**
* Utility method to convert an ordinal value into the respective enumeration. Used e.g. for
* hibernate.
*
* @param i
* the ordinal value.
* @return the exception event
*/
public static ExceptionEvent fromOrd(int i) {
if ((i < 0) || (i >= ExceptionEvent.values().length)) {
throw new IndexOutOfBoundsException("Invalid ordinal");
}
return ExceptionEvent.values()[i];
}
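	/*
	 * Hedged usage sketch (illustrative caller, not part of the toolkit):
	 *
	 *   int stored = ExceptionEvent.HANDLED.ordinal();          // 3, e.g. as persisted by Hibernate
	 *   ExceptionEvent event = ExceptionEvent.fromOrd(stored);  // HANDLED
	 *   ExceptionEvent bad = ExceptionEvent.fromOrd(7);         // throws IndexOutOfBoundsException
	 */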
/**
* {@inheritDoc}
*/
@Override
public long getObjectSize(IObjectSizes objectSizes) {
return getObjectSize(objectSizes, true);
}
/**
* {@inheritDoc}
*/
@Override
public long getObjectSize(IObjectSizes objectSizes, boolean doAlign) {
long size = objectSizes.getSizeOfObjectHeader();
size += objectSizes.getPrimitiveTypesSize(1, 0, 1, 0, 0, 0);
size += objectSizes.getSizeOf(name());
if (doAlign) {
return objectSizes.alignTo8Bytes(size);
} else {
return size;
}
}
}
| 445 |
13,648 | # Test board-specific items on PYBv1.x
import os, pyb
if not "PYBv1." in os.uname().machine:
print("SKIP")
raise SystemExit
# test creating UART by id/name
for bus in (1, 2, 3, 4, 5, 6, 7, "XA", "XB", "YA", "YB", "Z"):
try:
pyb.UART(bus, 9600)
print("UART", bus)
except ValueError:
print("ValueError", bus)
# test creating SPI by id/name
for bus in (1, 2, 3, "X", "Y", "Z"):
try:
pyb.SPI(bus)
print("SPI", bus)
except ValueError:
print("ValueError", bus)
# test creating I2C by id/name
for bus in (2, 3, "X", "Y", "Z"):
try:
pyb.I2C(bus)
print("I2C", bus)
except ValueError:
print("ValueError", bus)
# test creating CAN by id/name
for bus in (1, 2, 3, "YA", "YB", "YC"):
try:
pyb.CAN(bus, pyb.CAN.LOOPBACK)
print("CAN", bus)
except ValueError:
print("ValueError", bus)
| 452 |
575 | <filename>chromecast/crash/linux/crash_testing_utils.cc
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chromecast/crash/linux/crash_testing_utils.h"
#include <utility>
#include "base/files/file_util.h"
#include "base/logging.h"
#include "base/strings/string_split.h"
#include "base/strings/stringprintf.h"
#include "base/values.h"
#include "chromecast/base/path_utils.h"
#include "chromecast/base/serializers.h"
#include "chromecast/crash/linux/dump_info.h"
#define RCHECK(cond, retval, err) \
do { \
if (!(cond)) { \
LOG(ERROR) << (err); \
return (retval); \
} \
} while (0)
namespace chromecast {
namespace {
const char kRatelimitKey[] = "ratelimit";
const char kRatelimitPeriodStartKey[] = "period_start";
const char kRatelimitPeriodDumpsKey[] = "period_dumps";
std::unique_ptr<base::ListValue> ParseLockFile(const std::string& path) {
std::string lockfile_string;
RCHECK(base::ReadFileToString(base::FilePath(path), &lockfile_string),
nullptr,
"Failed to read file");
std::vector<std::string> lines = base::SplitString(
lockfile_string, "\n", base::KEEP_WHITESPACE, base::SPLIT_WANT_NONEMPTY);
std::unique_ptr<base::ListValue> dumps = std::make_unique<base::ListValue>();
// Validate dumps
for (const std::string& line : lines) {
if (line.size() == 0)
continue;
std::unique_ptr<base::Value> dump_info = DeserializeFromJson(line);
DumpInfo info(dump_info.get());
RCHECK(info.valid(), nullptr, "Invalid DumpInfo");
dumps->Append(std::move(dump_info));
}
return dumps;
}
std::unique_ptr<base::Value> ParseMetadataFile(const std::string& path) {
return DeserializeJsonFromFile(base::FilePath(path));
}
int WriteLockFile(const std::string& path, base::ListValue* contents) {
DCHECK(contents);
std::string lockfile;
for (const auto& elem : *contents) {
base::Optional<std::string> dump_info = SerializeToJson(elem);
RCHECK(dump_info, -1, "Failed to serialize DumpInfo");
lockfile += *dump_info;
    lockfile += "\n";  // Add line separators
}
return WriteFile(base::FilePath(path), lockfile.c_str(), lockfile.size()) >= 0
? 0
: -1;
}
bool WriteMetadataFile(const std::string& path, const base::Value* metadata) {
DCHECK(metadata);
return SerializeJsonToFile(base::FilePath(path), *metadata);
}
} // namespace
std::unique_ptr<DumpInfo> CreateDumpInfo(const std::string& json_string) {
std::unique_ptr<base::Value> value(DeserializeFromJson(json_string));
return std::make_unique<DumpInfo>(value.get());
}
bool FetchDumps(const std::string& lockfile_path,
std::vector<std::unique_ptr<DumpInfo>>* dumps) {
DCHECK(dumps);
std::unique_ptr<base::ListValue> dump_list = ParseLockFile(lockfile_path);
RCHECK(dump_list, false, "Failed to parse lockfile");
dumps->clear();
for (const auto& elem : *dump_list) {
std::unique_ptr<DumpInfo> dump(new DumpInfo(&elem));
RCHECK(dump->valid(), false, "Invalid DumpInfo");
dumps->push_back(std::move(dump));
}
return true;
}
bool ClearDumps(const std::string& lockfile_path) {
std::unique_ptr<base::ListValue> dump_list =
std::make_unique<base::ListValue>();
return WriteLockFile(lockfile_path, dump_list.get()) == 0;
}
bool CreateFiles(const std::string& lockfile_path,
const std::string& metadata_path) {
std::unique_ptr<base::DictionaryValue> metadata =
std::make_unique<base::DictionaryValue>();
auto ratelimit_fields = std::make_unique<base::DictionaryValue>();
ratelimit_fields->SetDouble(kRatelimitPeriodStartKey, 0.0);
ratelimit_fields->SetInteger(kRatelimitPeriodDumpsKey, 0);
metadata->Set(kRatelimitKey, std::move(ratelimit_fields));
std::unique_ptr<base::ListValue> dumps = std::make_unique<base::ListValue>();
return WriteLockFile(lockfile_path, dumps.get()) == 0 &&
WriteMetadataFile(metadata_path, metadata.get());
}
bool AppendLockFile(const std::string& lockfile_path,
const std::string& metadata_path,
const DumpInfo& dump) {
std::unique_ptr<base::ListValue> contents = ParseLockFile(lockfile_path);
if (!contents) {
CreateFiles(lockfile_path, metadata_path);
if (!(contents = ParseLockFile(lockfile_path))) {
return false;
}
}
contents->Append(dump.GetAsValue());
return WriteLockFile(lockfile_path, contents.get()) == 0;
}
bool SetRatelimitPeriodStart(const std::string& metadata_path,
const base::Time& start) {
std::unique_ptr<base::Value> contents = ParseMetadataFile(metadata_path);
base::DictionaryValue* dict;
base::DictionaryValue* ratelimit_params;
if (!contents || !contents->GetAsDictionary(&dict) ||
!dict->GetDictionary(kRatelimitKey, &ratelimit_params)) {
return false;
}
ratelimit_params->SetDouble(kRatelimitPeriodStartKey, start.ToDoubleT());
  return WriteMetadataFile(metadata_path, contents.get());  // returns true on success
}
} // namespace chromecast
| 2,005 |
852 | <reponame>ckamtsikis/cmssw<filename>RecoJets/JetProducers/plugins/PtMinJetSelector.cc<gh_stars>100-1000
#include "FWCore/Framework/interface/MakerMacros.h"
#include "PtMinJetSelector.h"
DEFINE_FWK_MODULE(PtMinCaloJetSelector);
DEFINE_FWK_MODULE(PtMinGenJetSelector);
DEFINE_FWK_MODULE(PtMinPFJetSelector);
DEFINE_FWK_MODULE(PtMinBasicJetSelector);
| 150 |
2,151 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ASH_CONTENT_ASH_WITH_CONTENT_EXPORT_H_
#define ASH_CONTENT_ASH_WITH_CONTENT_EXPORT_H_
// Defines ASH_WITH_CONTENT_EXPORT so that functionality implemented by the
// ash/content module can be exported to consumers.
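//
// Illustrative application of the macro (hypothetical class name, shown only
// to make the intent concrete):
//
//   class ASH_WITH_CONTENT_EXPORT SomeAshWithContentClass { /* ... */ };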
#if defined(COMPONENT_BUILD)
#if defined(WIN32)
#if defined(ASH_WITH_CONTENT_IMPLEMENTATION)
#define ASH_WITH_CONTENT_EXPORT __declspec(dllexport)
#else
#define ASH_WITH_CONTENT_EXPORT __declspec(dllimport)
#endif // defined(ASH_WITH_CONTENT_IMPLEMENTATION)
#else // defined(WIN32)
#if defined(ASH_WITH_CONTENT_IMPLEMENTATION)
#define ASH_WITH_CONTENT_EXPORT __attribute__((visibility("default")))
#else
#define ASH_WITH_CONTENT_EXPORT
#endif
#endif
#else // defined(COMPONENT_BUILD)
#define ASH_WITH_CONTENT_EXPORT
#endif
#endif // ASH_CONTENT_ASH_WITH_CONTENT_EXPORT_H_
| 355 |
398 | package com.ruiyun.jvppeteer.protocol.log;
import com.ruiyun.jvppeteer.protocol.log.LogEntry;
/**
* Issued when new message was logged.
*/
public class EntryAddedPayload {
/**
* The entry.
*/
private LogEntry entry;
public LogEntry getEntry() {
return entry;
}
public void setEntry(LogEntry entry) {
this.entry = entry;
}
@Override
public String toString() {
return "EntryAddedPayload{" +
"entry=" + entry +
'}';
}
}
| 234 |
5,169 | <reponame>Gantios/Specs
{
"name": "RAMUtil",
"version": "1.0.1",
"summary": "This is a util",
"description": "TODO: Add long description of the pod here.",
"homepage": "https://github.com/RamboQiu/RAMUtil.git",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"RamboQiu": "<EMAIL>"
},
"source": {
"git": "https://github.com/RamboQiu/RAMUtil.git",
"tag": "1.0.1"
},
"platforms": {
"ios": "8.0"
},
"source_files": "RAMUtil/**/*",
"subspecs": [
{
"name": "RAMExport",
"source_files": "RAMUtil/RAMExport/*.{h,m}",
"public_header_files": "RAMUtil/RAMExport/*.h"
},
{
"name": "RAMLog",
"source_files": "RAMUtil/RAMLog/*.{h,m}",
"public_header_files": "RAMUtil/RAMLog/*.h"
},
{
"name": "RAMSafeCollection",
"source_files": "RAMUtil/RAMSafeCollection/*.{h,m}",
"public_header_files": "RAMUtil/RAMSafeCollection/*.h"
},
{
"name": "RAMMustOverrider",
"source_files": "RAMUtil/RAMMustOverrider/*.{h,m}",
"public_header_files": "RAMUtil/RAMMustOverrider/*.h"
},
{
"name": "RAMFrame",
"source_files": "RAMUtil/RAMFrame/*.{h,m}",
"public_header_files": "RAMUtil/RAMFrame/*.h"
},
{
"name": "RAMCellData",
"source_files": "RAMUtil/RAMCellData/*.{h,m}",
"public_header_files": "RAMUtil/RAMCellData/*.h"
},
{
"name": "RAMColor",
"source_files": "RAMUtil/RAMColor/*.{h,m}",
"public_header_files": "RAMUtil/RAMColor/*.h"
},
{
"name": "RAMImage",
"source_files": "RAMUtil/RAMImage/*.{h,m}",
"public_header_files": "RAMUtil/RAMImage/*.h"
}
]
}
| 836 |
429 | #include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <errno.h>
#include <fcntl.h>
char * mktemp(char * template) {
	if (strlen(template) < 6 || strstr(template + strlen(template) - 6, "XXXXXX") != template + strlen(template) - 6) {
errno = EINVAL;
return NULL;
}
static int _i = 0;
char tmp[7] = {0};
	/* keep the generated suffix at exactly six characters so it cannot overflow tmp */
	snprintf(tmp, sizeof(tmp), "%04d%02d", (int)(getpid() % 10000), _i++ % 100);
memcpy(template + strlen(template) - 6, tmp, 6);
return template;
}
int mkstemp(char * template) {
mktemp(template);
return open(template, O_RDWR | O_CREAT, 0600);
}
| 227 |
331 | //
// MLSubscriptionTableViewController.h
// Monal
//
// Created by <NAME> on 11/24/19.
// Copyright © 2019 Monal.im. All rights reserved.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
@interface MLSubscriptionTableViewController : UITableViewController
@end
NS_ASSUME_NONNULL_END
| 110 |
509 | <filename>core/src/main/java/com/onelogin/saml2/util/Constants.java
package com.onelogin.saml2.util;
/**
* Constants class of OneLogin's Java Toolkit.
*
* A class that contains several constants related to the SAML protocol
*/
public final class Constants {
/**
* Value added to the current time in time condition validations.
*/
public static final Integer ALOWED_CLOCK_DRIFT = 180; // 3 min in seconds
// NameID Formats
public static final String NAMEID_EMAIL_ADDRESS = "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress";
public static final String NAMEID_X509_SUBJECT_NAME = "urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName";
public static final String NAMEID_WINDOWS_DOMAIN_QUALIFIED_NAME = "urn:oasis:names:tc:SAML:1.1:nameid-format:WindowsDomainQualifiedName";
public static final String NAMEID_UNSPECIFIED = "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified";
public static final String NAMEID_KERBEROS = "urn:oasis:names:tc:SAML:2.0:nameid-format:kerberos";
public static final String NAMEID_ENTITY = "urn:oasis:names:tc:SAML:2.0:nameid-format:entity";
public static final String NAMEID_TRANSIENT = "urn:oasis:names:tc:SAML:2.0:nameid-format:transient";
public static final String NAMEID_PERSISTENT = "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent";
public static final String NAMEID_ENCRYPTED = "urn:oasis:names:tc:SAML:2.0:nameid-format:encrypted";
// Attribute Name Formats
public static final String ATTRNAME_FORMAT_UNSPECIFIED = "urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified";
public static final String ATTRNAME_FORMAT_URI = "urn:oasis:names:tc:SAML:2.0:attrname-format:uri";
public static final String ATTRNAME_FORMAT_BASIC = "urn:oasis:names:tc:SAML:2.0:attrname-format:basic";
// Namespaces
public static final String NS_SAML = "urn:oasis:names:tc:SAML:2.0:assertion";
public static final String NS_SAMLP = "urn:oasis:names:tc:SAML:2.0:protocol";
public static final String NS_SOAP = "http://schemas.xmlsoap.org/soap/envelope/";
public static final String NS_MD = "urn:oasis:names:tc:SAML:2.0:metadata";
public static final String NS_XS = "http://www.w3.org/2001/XMLSchema";
public static final String NS_XSI = "http://www.w3.org/2001/XMLSchema-instance";
public static final String NS_XENC = "http://www.w3.org/2001/04/xmlenc#";
public static final String NS_DS = "http://www.w3.org/2000/09/xmldsig#";
// Bindings
public static final String BINDING_HTTP_POST = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST";
public static final String BINDING_HTTP_REDIRECT = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect";
public static final String BINDING_HTTP_ARTIFACT = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact";
public static final String BINDING_SOAP = "urn:oasis:names:tc:SAML:2.0:bindings:SOAP";
public static final String BINDING_DEFLATE = "urn:oasis:names:tc:SAML:2.0:bindings:URL-Encoding:DEFLATE";
// Auth Context Class
public static final String AC_UNSPECIFIED = "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified";
public static final String AC_PASSWORD = "urn:oasis:names:tc:SAML:2.0:ac:classes:Password";
public static final String AC_X509 = "urn:oasis:names:tc:SAML:2.0:ac:classes:X509";
public static final String AC_SMARTCARD = "urn:oasis:names:tc:SAML:2.0:ac:classes:Smartcard";
public static final String AC_KERBEROS = "urn:oasis:names:tc:SAML:2.0:ac:classes:Kerberos";
// Subject Confirmation
public static final String CM_BEARER = "urn:oasis:names:tc:SAML:2.0:cm:bearer";
public static final String CM_HOLDER_KEY = "urn:oasis:names:tc:SAML:2.0:cm:holder-of-key";
public static final String CM_SENDER_VOUCHES = "urn:oasis:names:tc:SAML:2.0:cm:sender-vouches";
// Status Codes
public static final String STATUS_SUCCESS = "urn:oasis:names:tc:SAML:2.0:status:Success";
public static final String STATUS_REQUESTER = "urn:oasis:names:tc:SAML:2.0:status:Requester";
public static final String STATUS_RESPONDER = "urn:oasis:names:tc:SAML:2.0:status:Responder";
public static final String STATUS_VERSION_MISMATCH = "urn:oasis:names:tc:SAML:2.0:status:VersionMismatch";
// Status Second-level Codes
public static final String STATUS_AUTHNFAILED = "urn:oasis:names:tc:SAML:2.0:status:AuthnFailed";
public static final String STATUS_INVALID_ATTRNAME_OR_VALUE = "urn:oasis:names:tc:SAML:2.0:status:InvalidAttrNameOrValue";
public static final String STATUS_INVALID_NAMEIDPOLICY = "urn:oasis:names:tc:SAML:2.0:status:InvalidNameIDPolicy";
public static final String STATUS_NO_AUTHNCONTEXT = "urn:oasis:names:tc:SAML:2.0:status:NoAuthnContext";
public static final String STATUS_NO_AVAILABLE_IDP = "urn:oasis:names:tc:SAML:2.0:status:NoAvailableIDP";
public static final String STATUS_NO_PASSIVE = "urn:oasis:names:tc:SAML:2.0:status:NoPassive";
public static final String STATUS_NO_SUPPORTED_IDP = "urn:oasis:names:tc:SAML:2.0:status:NoSupportedIDP";
public static final String STATUS_PARTIAL_LOGOUT = "urn:oasis:names:tc:SAML:2.0:status:PartialLogout";
public static final String STATUS_PROXY_COUNT_EXCEEDED = "urn:oasis:names:tc:SAML:2.0:status:ProxyCountExceeded";
public static final String STATUS_REQUEST_DENIED = "urn:oasis:names:tc:SAML:2.0:status:RequestDenied";
public static final String STATUS_REQUEST_UNSUPPORTED = "urn:oasis:names:tc:SAML:2.0:status:RequestUnsupported";
public static final String STATUS_REQUEST_VERSION_DEPRECATED = "urn:oasis:names:tc:SAML:2.0:status:RequestVersionDeprecated";
public static final String STATUS_REQUEST_VERSION_TOO_HIGH = "urn:oasis:names:tc:SAML:2.0:status:RequestVersionTooHigh";
public static final String STATUS_REQUEST_VERSION_TOO_LOW = "urn:oasis:names:tc:SAML:2.0:status:RequestVersionTooLow";
public static final String STATUS_RESOURCE_NOT_RECOGNIZED = "urn:oasis:names:tc:SAML:2.0:status:ResourceNotRecognized";
public static final String STATUS_TOO_MANY_RESPONSES = "urn:oasis:names:tc:SAML:2.0:status:TooManyResponses";
public static final String STATUS_UNKNOWN_ATTR_PROFILE = "urn:oasis:names:tc:SAML:2.0:status:UnknownAttrProfile";
public static final String STATUS_UNKNOWN_PRINCIPAL = "urn:oasis:names:tc:SAML:2.0:status:UnknownPrincipal";
public static final String STATUS_UNSUPPORTED_BINDING = "urn:oasis:names:tc:SAML:2.0:status:UnsupportedBinding";
// Canonization
public static final String C14N = "http://www.w3.org/TR/2001/REC-xml-c14n-20010315";
public static final String C14N_WC = "http://www.w3.org/TR/2001/REC-xml-c14n-20010315#WithComments";
public static final String C14N11 = "http://www.w3.org/2006/12/xml-c14n11";
public static final String C14N11_WC = "http://www.w3.org/2006/12/xml-c14n11#WithComments";
public static final String C14NEXC = "http://www.w3.org/2001/10/xml-exc-c14n#";
public static final String C14NEXC_WC = "http://www.w3.org/2001/10/xml-exc-c14n#WithComments";
// Sign & Crypt
// https://www.w3.org/TR/xmlenc-core/#sec-Alg-MessageDigest
// https://www.w3.org/TR/xmlsec-algorithms/#signature-method-uris
// https://tools.ietf.org/html/rfc6931
public static final String SHA1 = "http://www.w3.org/2000/09/xmldsig#sha1";
public static final String SHA256 = "http://www.w3.org/2001/04/xmlenc#sha256";
public static final String SHA384 = "http://www.w3.org/2001/04/xmldsig-more#sha384";
public static final String SHA512 = "http://www.w3.org/2001/04/xmlenc#sha512";
public static final String DSA_SHA1 = "http://www.w3.org/2000/09/xmldsig#dsa-sha1";
public static final String RSA_SHA1 = "http://www.w3.org/2000/09/xmldsig#rsa-sha1";
public static final String RSA_SHA256 = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256";
public static final String RSA_SHA384 = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha384";
public static final String RSA_SHA512 = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha512";
public static final String TRIPLEDES_CBC = "http://www.w3.org/2001/04/xmlenc#tripledes-cbc";
public static final String AES128_CBC = "http://www.w3.org/2001/04/xmlenc#aes128-cbc";
public static final String AES192_CBC = "http://www.w3.org/2001/04/xmlenc#aes192-cbc";
public static final String AES256_CBC = "http://www.w3.org/2001/04/xmlenc#aes256-cbc";
public static final String A128KW = "http://www.w3.org/2001/04/xmlenc#kw-aes128";
public static final String A192KW = "http://www.w3.org/2001/04/xmlenc#kw-aes192";
public static final String A256KW = "http://www.w3.org/2001/04/xmlenc#kw-aes256";
public static final String RSA_1_5 = "http://www.w3.org/2001/04/xmlenc#rsa-1_5";
public static final String RSA_OAEP_MGF1P = "http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p";
public static final String ENVSIG = "http://www.w3.org/2000/09/xmldsig#enveloped-signature";
private Constants() {
//not called
}
}
| 3,376 |
463 | # Copyright (c) 2016, 2018, 2020 <NAME> <<EMAIL>>
# Copyright (c) 2018 <NAME> <<EMAIL>>
# Copyright (c) 2018 wgehalo <<EMAIL>>
# Copyright (c) 2018 <NAME> <<EMAIL>>
# Copyright (c) 2020-2021 hippo91 <<EMAIL>>
# Copyright (c) 2020 <NAME> <<EMAIL>>
# Copyright (c) 2021 <NAME> <<EMAIL>>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/LICENSE
import sys
import astroid
PY36 = sys.version_info >= (3, 6)
def _hashlib_transform():
signature = "value=''"
template = """
class %(name)s(object):
def __init__(self, %(signature)s): pass
def digest(self):
return %(digest)s
def copy(self):
return self
def update(self, value): pass
def hexdigest(self):
return ''
@property
def name(self):
return %(name)r
@property
def block_size(self):
return 1
@property
def digest_size(self):
return 1
"""
algorithms_with_signature = dict.fromkeys(
["md5", "sha1", "sha224", "sha256", "sha384", "sha512"], signature
)
if PY36:
blake2b_signature = "data=b'', *, digest_size=64, key=b'', salt=b'', \
person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
node_depth=0, inner_size=0, last_node=False"
blake2s_signature = "data=b'', *, digest_size=32, key=b'', salt=b'', \
person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
node_depth=0, inner_size=0, last_node=False"
new_algorithms = dict.fromkeys(
["sha3_224", "sha3_256", "sha3_384", "sha3_512", "shake_128", "shake_256"],
signature,
)
algorithms_with_signature.update(new_algorithms)
algorithms_with_signature.update(
{"blake2b": blake2b_signature, "blake2s": blake2s_signature}
)
classes = "".join(
template % {"name": hashfunc, "digest": 'b""', "signature": signature}
for hashfunc, signature in algorithms_with_signature.items()
)
return astroid.parse(classes)
astroid.register_module_extender(astroid.MANAGER, "hashlib", _hashlib_transform)
| 1,024 |
626 | <gh_stars>100-1000
package org.jsmart.zerocode.integration.tests.kafka.consume;
import org.jsmart.zerocode.core.domain.Scenario;
import org.jsmart.zerocode.core.domain.TargetEnv;
import org.jsmart.zerocode.core.runner.ZeroCodeUnitRunner;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
@TargetEnv("kafka_servers/kafka_test_server_polling.properties")
@RunWith(ZeroCodeUnitRunner.class)
public class KafkaConsumePollingTest {
/**
     * When no polling time is explicitly defined in the properties
     * file (e.g. consumer.pollingTime),
     * then the initial poll during consumer join falls back to the
     * program-defined default of 500ms.
*/
@Test
@Scenario("kafka/consume/test_kafka_consume.json")
public void testKafkaConsume() throws Exception {
}
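    /*
     * Hedged example: to override the 500ms default described above, the
     * properties file referenced by @TargetEnv would carry an entry such as
     * the one below (the key name is taken from the Javadoc; the value is
     * illustrative):
     *
     *   consumer.pollingTime=1000
     */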
}
| 295 |
2,039 | <filename>nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/InvertPermutation.java
/*-
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package org.nd4j.linalg.api.ops.impl.transforms;
import lombok.NoArgsConstructor;
import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.imports.NoOpNameFoundException;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
/**
* Inverse of index permutation.
*
* @author <NAME>
*/
@NoArgsConstructor
public class InvertPermutation extends BaseDynamicTransformOp {
public InvertPermutation(SameDiff sameDiff, SDVariable input, boolean inPlace) {
super( sameDiff, new SDVariable[] {input}, inPlace);
}
@Override
public String opName() {
return "invert_permutation";
}
@Override
public String onnxName() {
throw new NoOpNameFoundException("No onnx name found for shape " + opName());
}
@Override
public String tensorflowName() {
return "InvertPermutation";
}
@Override
public List<SDVariable> doDiff(List<SDVariable> grad) {
SDVariable gradient = grad.get(0);
SDVariable invertedGradient = f().invertPermutation(gradient, false);
return Arrays.asList(invertedGradient);
}
}
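/*
 * Worked example (illustrative plain-Java sketch of the op's semantics, not
 * part of the ND4J API):
 *
 *   int[] perm = {1, 2, 0};
 *   int[] inv = new int[perm.length];
 *   for (int i = 0; i < perm.length; i++) {
 *       inv[perm[i]] = i;               // inv == {2, 0, 1}
 *   }
 *
 * If the forward permutation sends position i to perm[i], the inverse sends
 * perm[i] back to i, which is what invert_permutation computes.
 */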
| 713 |
621 | <gh_stars>100-1000
/*******************************************************************************
* Copyright 2013-2020 QaProSoft (http://www.qaprosoft.com).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.qaprosoft.carina.core.foundation.utils.resources;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
public class LocaleReader {
public static List<Locale> init(String locale) {
List<Locale> locales = new ArrayList<Locale>();
String[] strLocales = locale.split(",");
for (int i = 0; i < strLocales.length; i++) {
String[] localeSetttings = strLocales[i].trim().split("_");
String lang, country = "";
lang = localeSetttings[0];
if (localeSetttings.length > 1) {
country = localeSetttings[1];
}
locales.add(new Locale(lang, country));
}
return locales;
}
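    /*
     * Illustrative call (locale string format inferred from the parsing above):
     *
     *   List<Locale> locales = LocaleReader.init("en_US, de_DE, fr");
     *   // -> [Locale("en", "US"), Locale("de", "DE"), Locale("fr", "")]
     */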
}
| 512 |
1,224 | <gh_stars>1000+
package jycessing;
public enum DisplayType {
WINDOWED,
PRESENTATION;
}
| 37 |
3,034 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.taglib;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.tagext.Tag;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Marker;
/**
* This class implements the {@code <log:ifEnabled>} tag.
*
* @since 2.0
*/
public class IfEnabledTag extends LoggerAwareTagSupport {
private static final long serialVersionUID = 1L;
private transient Object level;
private Marker marker;
@Override
protected void init() {
super.init();
this.level = null;
this.marker = null;
}
public final void setLevel(final Object level) {
this.level = level;
}
public final void setMarker(final Marker marker) {
this.marker = marker;
}
@Override
public int doStartTag() throws JspException {
final Level level = TagUtils.resolveLevel(this.level);
if (level == null) {
throw new JspException("Level must be of type String or org.apache.logging.log4j.Level.");
}
return TagUtils.isEnabled(this.getLogger(), level, this.marker) ? Tag.EVAL_BODY_INCLUDE : Tag.SKIP_BODY;
}
}
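/*
 * Hedged usage sketch. In a real page the JSP container instantiates the tag
 * and LoggerAwareTagSupport supplies the logger; the lines below assume that
 * wiring has already happened and are illustrative only:
 *
 *   tag.setLevel("DEBUG");                       // accepts a String or a Level
 *   if (tag.doStartTag() == Tag.EVAL_BODY_INCLUDE) {
 *       // the tag body is rendered only when DEBUG is enabled for the logger
 *   }
 */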
| 651 |
682 | <reponame>HackerFoo/vtr-verilog-to-routing<gh_stars>100-1000
#ifndef SYNTH_H
#define SYNTH_H
#include "chains.h"
void init_synth(int argc, char **argv);
void create_problem(cfset_t &coeffs, reg_t input_reg = 1);
void create_random_problem(int size);
void set_bits(int b);
void solve(oplist_t *l, regmap_t *regs, reg_t (*tmpreg)());
void dump_state(ostream &os);
void state();
extern bool SLOW_C1;
extern bool SLOW_C2;
extern bool USE_TABLE;
extern bool GEN_UNIQUE;
extern bool GEN_ADDERS;
extern bool GEN_DAG;
extern bool PRINT_TARGETS;
extern bool USE_AC1;
extern bool IMPROVE_AC1;
extern bool MAX_BENEFIT;
extern int VERBOSE;
extern cfvec_t SIZES;
extern cfvec_t SPEC_ALL_TARGETS; /* all specified targets (including duplicates) */
extern cfset_t SPEC_TARGETS; /* duplicates/evens/negatives are omitted here */
extern cfset_t TARGETS; /* not yet synthesized coefficients */
extern full_addmap_t READY; /* synthesized coefficients */
extern adag * ADAG;
#endif
| 393 |
372 | /* Editor Settings: expandtabs and use 4 spaces for indentation
* ex: set softtabstop=4 tabstop=8 expandtab shiftwidth=4: *
* -*- mode: c, c-basic-offset: 4 -*- */
/*
* Copyright © BeyondTrust Software 2004 - 2019
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* BEYONDTRUST MAKES THIS SOFTWARE AVAILABLE UNDER OTHER LICENSING TERMS AS
* WELL. IF YOU HAVE ENTERED INTO A SEPARATE LICENSE AGREEMENT WITH
* BEYONDTRUST, THEN YOU MAY ELECT TO USE THE SOFTWARE UNDER THE TERMS OF THAT
* SOFTWARE LICENSE AGREEMENT INSTEAD OF THE TERMS OF THE APACHE LICENSE,
* NOTWITHSTANDING THE ABOVE NOTICE. IF YOU HAVE QUESTIONS, OR WISH TO REQUEST
* A COPY OF THE ALTERNATE LICENSING TERMS OFFERED BY BEYONDTRUST, PLEASE CONTACT
* BEYONDTRUST AT beyondtrust.com/contact
*/
/*
* Copyright (C) BeyondTrust Software. All rights reserved.
*
* Module Name:
*
* main.c
*
* Abstract:
*
* BeyondTrust Security and Authentication Subsystem (LSASS)
*
* Test Program for exercising LsaFindUserById
*
* Authors: <NAME> (<EMAIL>)
* <NAME> (<EMAIL>)
*/
#include "config.h"
#include "lsasystem.h"
#include "lsadef.h"
#include "lsa/lsa.h"
#include "lsaclient.h"
#include "lsaipc.h"
#include "common.h"
#define LW_PRINTF_STRING(x) ((x) ? (x) : "<null>")
static
VOID
ParseArgs(
int argc,
char* argv[],
uid_t* pUID,
PDWORD pdwInfoLevel
);
static
VOID
ShowUsage();
static
VOID
PrintUserInfo_0(
PLSA_USER_INFO_0 pUserInfo,
BOOLEAN bAllowedLogon
);
static
VOID
PrintUserInfo_1(
PLSA_USER_INFO_1 pUserInfo,
BOOLEAN bAllowedLogon
);
static
VOID
PrintUserInfo_2(
PLSA_USER_INFO_2 pUserInfo,
BOOLEAN bAllowedLogon
);
static
DWORD
MapErrorCode(
DWORD dwError
);
int
find_user_by_id_main(
int argc,
char* argv[]
)
{
DWORD dwError = 0;
uid_t uid = 0;
DWORD dwInfoLevel = 0;
HANDLE hLsaConnection = (HANDLE)NULL;
PVOID pUserInfo = NULL;
size_t dwErrorBufferSize = 0;
BOOLEAN bPrintOrigError = TRUE;
BOOLEAN bAllowedLogon = TRUE;
ParseArgs(argc, argv, &uid, &dwInfoLevel);
dwError = LsaOpenServer(&hLsaConnection);
BAIL_ON_LSA_ERROR(dwError);
dwError = LsaFindUserById(
hLsaConnection,
uid,
dwInfoLevel,
&pUserInfo);
BAIL_ON_LSA_ERROR(dwError);
dwError = LsaCheckUserInList(
hLsaConnection,
((PLSA_USER_INFO_0)pUserInfo)->pszName,
NULL);
if (dwError)
{
bAllowedLogon = FALSE;
}
switch(dwInfoLevel)
{
case 0:
PrintUserInfo_0((PLSA_USER_INFO_0)pUserInfo,
bAllowedLogon);
break;
case 1:
PrintUserInfo_1((PLSA_USER_INFO_1)pUserInfo,
bAllowedLogon);
break;
case 2:
PrintUserInfo_2((PLSA_USER_INFO_2)pUserInfo,
bAllowedLogon);
break;
default:
fprintf(stderr, "Error: Invalid user info level [%u]\n", dwInfoLevel);
break;
}
cleanup:
if (pUserInfo) {
LsaFreeUserInfo(dwInfoLevel, pUserInfo);
}
if (hLsaConnection != (HANDLE)NULL) {
LsaCloseServer(hLsaConnection);
}
return (dwError);
error:
dwError = MapErrorCode(dwError);
dwErrorBufferSize = LwGetErrorString(dwError, NULL, 0);
if (dwErrorBufferSize > 0)
{
DWORD dwError2 = 0;
PSTR pszErrorBuffer = NULL;
dwError2 = LwAllocateMemory(
dwErrorBufferSize,
(PVOID*)&pszErrorBuffer);
if (!dwError2)
{
DWORD dwLen = LwGetErrorString(dwError, pszErrorBuffer, dwErrorBufferSize);
if ((dwLen == dwErrorBufferSize) && !LW_IS_NULL_OR_EMPTY_STR(pszErrorBuffer))
{
fprintf(stderr,
"Failed to locate user. Error code %u (%s).\n%s\n",
dwError,
LW_PRINTF_STRING(LwWin32ExtErrorToName(dwError)),
pszErrorBuffer);
bPrintOrigError = FALSE;
}
}
LW_SAFE_FREE_STRING(pszErrorBuffer);
}
if (bPrintOrigError)
{
fprintf(stderr,
"Failed to locate user. Error code %u (%s).\n",
dwError,
LW_PRINTF_STRING(LwWin32ExtErrorToName(dwError)));
}
goto cleanup;
}
VOID
ParseArgs(
int argc,
char* argv[],
uid_t* pUID,
PDWORD pdwInfoLevel
)
{
typedef enum {
PARSE_MODE_OPEN = 0,
PARSE_MODE_LEVEL,
PARSE_MODE_DONE
} ParseMode;
int iArg = 1;
PSTR pszArg = NULL;
uid_t uid = 0;
ParseMode parseMode = PARSE_MODE_OPEN;
DWORD dwInfoLevel = 0;
BOOLEAN bUidSpecified = FALSE;
do {
pszArg = argv[iArg++];
if (pszArg == NULL || *pszArg == '\0')
{
break;
}
switch (parseMode)
{
case PARSE_MODE_OPEN:
if ((strcmp(pszArg, "--help") == 0) ||
(strcmp(pszArg, "-h") == 0))
{
ShowUsage();
exit(0);
}
else if (!strcmp(pszArg, "--level")) {
parseMode = PARSE_MODE_LEVEL;
}
else
{
if (!IsUnsignedInteger(pszArg))
{
fprintf(stderr, "Please enter a uid which is an unsigned integer.\n");
ShowUsage();
exit(1);
}
int nRead = sscanf(pszArg, "%u", (unsigned int*)&uid);
if ((nRead == EOF) || (nRead == 0)) {
fprintf(stderr, "A valid uid was not specified\n");
ShowUsage();
exit(1);
}
bUidSpecified = TRUE;
parseMode = PARSE_MODE_DONE;
}
break;
case PARSE_MODE_LEVEL:
if (!IsUnsignedInteger(pszArg))
{
fprintf(stderr, "Please enter an info level which is an unsigned integer.\n");
ShowUsage();
exit(1);
}
dwInfoLevel = atoi(pszArg);
parseMode = PARSE_MODE_OPEN;
break;
case PARSE_MODE_DONE:
ShowUsage();
exit(1);
}
} while (iArg < argc);
if (parseMode != PARSE_MODE_OPEN && parseMode != PARSE_MODE_DONE)
{
ShowUsage();
exit(1);
}
if (!bUidSpecified) {
fprintf(stderr, "Please specify a uid to query for.\n");
ShowUsage();
exit(1);
}
*pUID = uid;
*pdwInfoLevel = dwInfoLevel;
}
void
ShowUsage()
{
printf("Usage: find-user-by-id {--level [0, 1, 2]} <uid>\n");
}
VOID
PrintUserInfo_0(
PLSA_USER_INFO_0 pUserInfo,
BOOLEAN bAllowedLogon
)
{
fprintf(stdout, "User info (Level-0):\n");
fprintf(stdout, "====================\n");
fprintf(stdout, "Name: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszName) ? "<null>" : pUserInfo->pszName);
fprintf(stdout, "SID: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszSid) ? "<null>" : pUserInfo->pszSid);
fprintf(stdout, "Uid: %u\n", (unsigned int)pUserInfo->uid);
fprintf(stdout, "Gid: %u\n", (unsigned int)pUserInfo->gid);
fprintf(stdout, "Gecos: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszGecos) ? "<null>" : pUserInfo->pszGecos);
fprintf(stdout, "Shell: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszShell) ? "<null>" : pUserInfo->pszShell);
fprintf(stdout, "Home dir: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszHomedir) ? "<null>" : pUserInfo->pszHomedir);
fprintf(stdout, "Logon restriction: %s\n", bAllowedLogon ? "NO" : "YES");
}
VOID
PrintUserInfo_1(
PLSA_USER_INFO_1 pUserInfo,
BOOLEAN bAllowedLogon
)
{
fprintf(stdout, "User info (Level-1):\n");
fprintf(stdout, "====================\n");
fprintf(stdout, "Name: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszName) ? "<null>" : pUserInfo->pszName);
fprintf(stdout, "SID: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszSid) ? "<null>" : pUserInfo->pszSid);
fprintf(stdout, "UPN: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszUPN) ? "<null>" : pUserInfo->pszUPN);
fprintf(stdout, "Generated UPN: %s\n", pUserInfo->bIsGeneratedUPN ? "YES" : "NO");
fprintf(stdout, "Uid: %u\n", (unsigned int)pUserInfo->uid);
fprintf(stdout, "Gid: %u\n", (unsigned int)pUserInfo->gid);
fprintf(stdout, "Gecos: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszGecos) ? "<null>" : pUserInfo->pszGecos);
fprintf(stdout, "Shell: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszShell) ? "<null>" : pUserInfo->pszShell);
fprintf(stdout, "Home dir: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszHomedir) ? "<null>" : pUserInfo->pszHomedir);
fprintf(stdout, "LMHash length: %u\n", pUserInfo->dwLMHashLen);
fprintf(stdout, "NTHash length: %u\n", pUserInfo->dwNTHashLen);
fprintf(stdout, "Local User: %s\n", pUserInfo->bIsLocalUser ? "YES" : "NO");
fprintf(stdout, "Logon restriction: %s\n", bAllowedLogon ? "NO" : "YES");
}
VOID
PrintUserInfo_2(
PLSA_USER_INFO_2 pUserInfo,
BOOLEAN bAllowedLogon
)
{
fprintf(stdout, "User info (Level-2):\n");
fprintf(stdout, "====================\n");
fprintf(stdout, "Name: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszName) ? "<null>" : pUserInfo->pszName);
fprintf(stdout, "SID: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszSid) ? "<null>" : pUserInfo->pszSid);
fprintf(stdout, "UPN: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszUPN) ? "<null>" : pUserInfo->pszUPN);
fprintf(stdout, "Generated UPN: %s\n", pUserInfo->bIsGeneratedUPN ? "YES" : "NO");
fprintf(stdout, "DN: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszDN) ? "<null>" : pUserInfo->pszDN);
fprintf(stdout, "Uid: %u\n", (unsigned int)pUserInfo->uid);
fprintf(stdout, "Gid: %u\n", (unsigned int)pUserInfo->gid);
fprintf(stdout, "Gecos: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszGecos) ? "<null>" : pUserInfo->pszGecos);
fprintf(stdout, "Shell: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszShell) ? "<null>" : pUserInfo->pszShell);
fprintf(stdout, "Home dir: %s\n",
LW_IS_NULL_OR_EMPTY_STR(pUserInfo->pszHomedir) ? "<null>" : pUserInfo->pszHomedir);
fprintf(stdout, "LMHash length: %u\n", pUserInfo->dwLMHashLen);
fprintf(stdout, "NTHash length: %u\n", pUserInfo->dwNTHashLen);
fprintf(stdout, "Local User: %s\n", pUserInfo->bIsLocalUser ? "YES" : "NO");
fprintf(stdout, "Account disabled (or locked): %s\n",
pUserInfo->bAccountDisabled ? "TRUE" : "FALSE");
fprintf(stdout, "Account Expired: %s\n",
pUserInfo->bAccountExpired ? "TRUE" : "FALSE");
fprintf(stdout, "Password never expires: %s\n",
pUserInfo->bPasswordNeverExpires ? "TRUE" : "FALSE");
fprintf(stdout, "Password Expired: %s\n",
pUserInfo->bPasswordExpired ? "TRUE" : "FALSE");
fprintf(stdout, "Prompt for password change: %s\n",
pUserInfo->bPromptPasswordChange ? "YES" : "NO");
fprintf(stdout, "User can change password: %s\n",
pUserInfo->bUserCanChangePassword ? "YES" : "NO");
fprintf(stdout, "Days till password expires: %u\n",
pUserInfo->dwDaysToPasswordExpiry);
fprintf(stdout, "Logon restriction: %s\n",
bAllowedLogon ? "NO" : "YES");
}
DWORD
MapErrorCode(
DWORD dwError
)
{
DWORD dwError2 = dwError;
switch (dwError)
{
case ECONNREFUSED:
case ENETUNREACH:
case ETIMEDOUT:
dwError2 = LW_ERROR_LSA_SERVER_UNREACHABLE;
break;
default:
break;
}
return dwError2;
}
| 7,464 |
1,062 | //
// Generated by class-dump 3.5b1 (64 bit) (Debug version compiled Dec 3 2019 19:59:57).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import "NSObject-Protocol.h"
@class NSAttributedString, NSString;
@protocol WebDocumentText <NSObject>
- (void)deselectAll;
- (void)selectAll;
- (NSAttributedString *)selectedAttributedString;
- (NSString *)selectedString;
- (NSAttributedString *)attributedString;
- (NSString *)string;
- (BOOL)supportsTextEncoding;
@end
| 166 |
344 | // Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code for reading in ELF files.
//
// For information on the ELF format, see
// http://www.x86.org/ftp/manuals/tools/elf.pdf
//
// I also liked:
// http://www.caldera.com/developers/gabi/1998-04-29/contents.html
//
// A note about types: When dealing with the file format, we use types
// like Elf32_Word, but in the public interfaces we treat all
// addresses as uint64. As a result, we should be able to symbolize
// 64-bit binaries from a 32-bit process (which we don't do,
// anyway). size_t should therefore be avoided, except where required
// by things like mmap().
//
// Although most of this code can deal with arbitrary ELF files of
// either word size, the public ElfReader interface only examines
// files loaded into the current address space, which must all match
// __WORDSIZE. This code cannot handle ELF files with a non-native
// byte ordering.
//
// TODO(chatham): It would be nice if we could accomplish this task
// without using malloc(), so we could use it as the process is dying.
#ifndef _GNU_SOURCE
#define _GNU_SOURCE // needed for pread()
#endif
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <unistd.h>
#include <fcntl.h>
#include <elf.h>
#include <string.h>
#include <algorithm>
#include <map>
#include <string>
#include <vector>
#include "symbolize/elf_reader.h"
#include "base/common.h"
namespace {
// The lowest bit of an ARM symbol value is used to indicate a Thumb address.
const int kARMThumbBitOffset = 0;
// Converts an ARM Thumb symbol value to a true aligned address value.
template <typename T>
T AdjustARMThumbSymbolValue(const T& symbol_table_value) {
return symbol_table_value & ~(1 << kARMThumbBitOffset);
}
} // namespace
namespace devtools_crosstool_autofdo {
template <class ElfArch> class ElfReaderImpl;
// 32-bit and 64-bit ELF files are processed exactly the same, except
// for various field sizes. Elf32 and Elf64 encompass all of the
// differences between the two formats, and all format-specific code
// in this file is templated on one of them.
class Elf32 {
public:
typedef Elf32_Ehdr Ehdr;
typedef Elf32_Shdr Shdr;
typedef Elf32_Phdr Phdr;
typedef Elf32_Word Word;
typedef Elf32_Sym Sym;
// What should be in the EI_CLASS header.
static const int kElfClass = ELFCLASS32;
// Given a symbol pointer, return the binding type (eg STB_WEAK).
static char Bind(const Elf32_Sym *sym) {
return ELF32_ST_BIND(sym->st_info);
}
// Given a symbol pointer, return the symbol type (eg STT_FUNC).
static char Type(const Elf32_Sym *sym) {
return ELF32_ST_TYPE(sym->st_info);
}
};
class Elf64 {
public:
typedef Elf64_Ehdr Ehdr;
typedef Elf64_Shdr Shdr;
typedef Elf64_Phdr Phdr;
typedef Elf64_Word Word;
typedef Elf64_Sym Sym;
// What should be in the EI_CLASS header.
static const int kElfClass = ELFCLASS64;
static char Bind(const Elf64_Sym *sym) {
return ELF64_ST_BIND(sym->st_info);
}
static char Type(const Elf64_Sym *sym) {
return ELF64_ST_TYPE(sym->st_info);
}
};
// ElfSectionReader mmaps a section of an ELF file ("section" is ELF
// terminology). The ElfReaderImpl object providing the section header
// must exist for the lifetime of this object.
//
// The motivation for mmaping individual sections of the file is that
// many Google executables are large enough when unstripped that we
// have to worry about running out of virtual address space.
template<class ElfArch>
class ElfSectionReader {
public:
ElfSectionReader(const string &path, int fd,
const typename ElfArch::Shdr §ion_header)
: header_(section_header) {
// Back up to the beginning of the page we're interested in.
const size_t additional = header_.sh_offset % getpagesize();
const size_t offset_aligned = header_.sh_offset - additional;
section_size_ = header_.sh_size;
size_aligned_ = section_size_ + additional;
contents_aligned_ = mmap(NULL, size_aligned_, PROT_READ, MAP_SHARED,
fd, offset_aligned);
if (contents_aligned_ == MAP_FAILED)
PLOG(FATAL) << "Could not mmap " << path;
// Set where the offset really should begin.
contents_ = reinterpret_cast<const char*>(contents_aligned_) +
(header_.sh_offset - offset_aligned);
}
~ElfSectionReader() {
munmap(contents_aligned_, size_aligned_);
}
// Return the section header for this section.
typename ElfArch::Shdr const &header() const { return header_; }
// Return memory at the given offset within this section.
const char *GetOffset(typename ElfArch::Word bytes) const {
CHECK(contents_ != NULL);
return contents_ + bytes;
}
const char *contents() const { return contents_; }
size_t section_size() const { return section_size_; }
private:
// page-aligned file contents
void *contents_aligned_;
// pointer within contents_aligned_ to where the section data begins
const char *contents_;
// size of contents_aligned_
size_t size_aligned_;
// size of contents.
size_t section_size_;
const typename ElfArch::Shdr header_;
DISALLOW_EVIL_CONSTRUCTORS(ElfSectionReader);
};
// An iterator over symbols in a given section. It handles walking
// through the entries in the specified section and mapping symbol
// entries to their names in the appropriate string table (in
// another section).
template<class ElfArch>
class SymbolIterator {
public:
SymbolIterator(ElfReaderImpl<ElfArch> *reader,
typename ElfArch::Word section_type)
: symbol_section_(reader->GetSectionByType(section_type)),
string_section_(NULL),
num_symbols_in_section_(0),
symbol_within_section_(0) {
CHECK(section_type == SHT_SYMTAB || section_type == SHT_DYNSYM);
// If this section type doesn't exist, leave
// num_symbols_in_section_ as zero, so this iterator is already
// done().
if (symbol_section_ != NULL) {
num_symbols_in_section_ = symbol_section_->header().sh_size /
symbol_section_->header().sh_entsize;
// Symbol sections have sh_link set to the section number of
// the string section containing the symbol names.
CHECK_NE(symbol_section_->header().sh_link, 0);
string_section_ = reader->GetSection(symbol_section_->header().sh_link);
}
}
// Return true iff we have passed all symbols in this section.
bool done() const {
return symbol_within_section_ >= num_symbols_in_section_;
}
// Advance to the next symbol in this section.
// REQUIRES: !done()
void Next() { ++symbol_within_section_; }
// Return a pointer to the current symbol.
// REQUIRES: !done()
const typename ElfArch::Sym *GetSymbol() const {
CHECK(!done());
return reinterpret_cast<const typename ElfArch::Sym*>(
symbol_section_->GetOffset(symbol_within_section_ *
symbol_section_->header().sh_entsize));
}
// Return the name of the current symbol, NULL if it has none.
// REQUIRES: !done()
const char *GetSymbolName() const {
int name_offset = GetSymbol()->st_name;
if (name_offset == 0)
return NULL;
return string_section_->GetOffset(name_offset);
}
private:
const ElfSectionReader<ElfArch> *const symbol_section_;
const ElfSectionReader<ElfArch> *string_section_;
int num_symbols_in_section_;
int symbol_within_section_;
DISALLOW_EVIL_CONSTRUCTORS(SymbolIterator);
};
// Copied from strings/strutil.h. Per chatham,
// this library should not depend on strings.
static inline bool MyHasSuffixString(const string& str, const string& suffix) {
int len = str.length();
int suflen = suffix.length();
return (suflen <= len) && (str.compare(len-suflen, suflen, suffix) == 0);
}
// ElfReader loads an ELF binary and can provide information about its
// contents. It is most useful for matching addresses to function
// names. It does not understand debugging formats (eg dwarf2), so it
// can't print line numbers. It takes a path to an elf file and a
// readable file descriptor for that file, which it does not assume
// ownership of.
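//
// Hedged usage sketch (hypothetical caller; error handling and the
// ElfReader::SymbolSink subclass are elided, and the path is illustrative):
//
//   int fd = open("/usr/bin/some_binary", O_RDONLY);
//   ElfReaderImpl<Elf64> reader("/usr/bin/some_binary", fd);
//   reader.VisitSymbols(SHT_SYMTAB, &my_symbol_sink);
//   close(fd);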
template<class ElfArch>
class ElfReaderImpl {
public:
explicit ElfReaderImpl(const string &path, int fd)
: path_(path),
fd_(fd),
section_headers_(NULL),
program_headers_(NULL) {
CHECK_GE(fd_, 0);
string error;
CHECK(IsArchElfFile(fd, &error)) << " Could not parse file: " << error;
is_dwp_ = MyHasSuffixString(path, ".dwp");
ParseHeaders(fd, path);
}
~ElfReaderImpl() {
for (int i = 0; i < sections_.size(); ++i)
delete sections_[i];
delete [] section_headers_;
delete [] program_headers_;
}
// Examine the headers of the file and return whether the file looks
// like an ELF file for this architecture. Takes an already-open
// file descriptor for the candidate file, reading in the prologue
// to see if the ELF file appears to match the current
// architecture. If error is non-NULL, it will be set with a reason
// in case of failure.
static bool IsArchElfFile(int fd, string *error) {
unsigned char header[EI_NIDENT];
if (pread(fd, header, sizeof(header), 0) != sizeof(header)) {
if (error != NULL) *error = "Could not read header";
return false;
}
if (memcmp(header, ELFMAG, SELFMAG) != 0) {
if (error != NULL) *error = "Missing ELF magic";
return false;
}
if (header[EI_CLASS] != ElfArch::kElfClass) {
if (error != NULL) *error = "Different word size";
return false;
}
int endian = 0;
if (header[EI_DATA] == ELFDATA2LSB)
endian = __LITTLE_ENDIAN;
else if (header[EI_DATA] == ELFDATA2MSB)
endian = __BIG_ENDIAN;
if (endian != __BYTE_ORDER) {
if (error != NULL) *error = "Different byte order";
return false;
}
return true;
}
void VisitSymbols(typename ElfArch::Word section_type,
ElfReader::SymbolSink *sink) {
VisitSymbols(section_type, sink, -1, -1, false);
}
void VisitSymbols(typename ElfArch::Word section_type,
ElfReader::SymbolSink *sink,
int symbol_binding,
int symbol_type,
bool get_raw_symbol_values) {
for (SymbolIterator<ElfArch> it(this, section_type);
!it.done(); it.Next()) {
const char *name = it.GetSymbolName();
if (!name) continue;
const typename ElfArch::Sym *sym = it.GetSymbol();
if (sink->filter && !sink->filter(name, sym->st_value, sym->st_size,
ElfArch::Bind(sym), ElfArch::Type(sym), sym->st_shndx))
continue;
if ((symbol_binding < 0 || ElfArch::Bind(sym) == symbol_binding) &&
(symbol_type < 0 || ElfArch::Type(sym) == symbol_type)) {
typename ElfArch::Sym symbol = *sym;
if (!get_raw_symbol_values)
AdjustSymbolValue(&symbol);
sink->AddSymbol(name, symbol.st_value, symbol.st_size,
ElfArch::Bind(sym), ElfArch::Type(sym), sym->st_shndx);
}
}
}
// Return an ElfSectionReader for the first section of the given
// type by iterating through all section headers. Returns NULL if
// the section type is not found.
const ElfSectionReader<ElfArch> *GetSectionByType(
typename ElfArch::Word section_type) {
for (int k = 0; k < GetNumSections(); ++k) {
if (section_headers_[k].sh_type == section_type) {
return GetSection(k);
}
}
return NULL;
}
// Return the name of section "shndx". Returns NULL if the section
// is not found.
const char *GetSectionNameByIndex(int shndx) {
return GetSectionName(section_headers_[shndx].sh_name);
}
// Return a pointer to section "shndx", and store the size in
// "size". Returns NULL if the section is not found.
const char *GetSectionContentsByIndex(int shndx, size_t *size) {
const ElfSectionReader<ElfArch> *section = GetSection(shndx);
if (section != NULL) {
*size = section->section_size();
return section->contents();
}
return NULL;
}
// Return the index of the first section of the given type by iterating
// through all section headers, starting at the specified start_index.
// Returns -1 if the section type is not found.
int GetSectionIndexByType(uint32_t type, int start_index) {
for (int shndx = start_index; shndx < GetNumSections(); ++shndx) {
if (section_headers_[shndx].sh_type == type) {
return shndx;
}
}
return -1;
}
// Return a pointer to the first section of the given name by
// iterating through all section headers, and store the size in
// "size". Returns NULL if the section name is not found.
const char *GetSectionContentsByName(const string §ion_name,
size_t *size) {
for (int k = 0; k < GetNumSections(); ++k) {
// When searching for sections in a .dwp file, the sections
// we're looking for will always be at the end of the section
// table, so reverse the direction of iteration.
int shndx = is_dwp_ ? GetNumSections() - k - 1 : k;
const char *name = GetSectionName(section_headers_[shndx].sh_name);
if (name != NULL && ElfReader::SectionNamesMatch(section_name, name)) {
const ElfSectionReader<ElfArch> *section = GetSection(shndx);
if (section == NULL) {
return NULL;
} else {
*size = section->section_size();
return section->contents();
}
}
}
return NULL;
}
// This is like GetSectionContentsByName() but it returns a lot of extra
// information about the section.
const char *GetSectionInfoByName(const string §ion_name,
ElfReader::SectionInfo *info) {
for (int k = 0; k < GetNumSections(); ++k) {
// When searching for sections in a .dwp file, the sections
// we're looking for will always be at the end of the section
// table, so reverse the direction of iteration.
int shndx = is_dwp_ ? GetNumSections() - k - 1 : k;
const char *name = GetSectionName(section_headers_[shndx].sh_name);
if (name != NULL && ElfReader::SectionNamesMatch(section_name, name)) {
const ElfSectionReader<ElfArch> *section = GetSection(shndx);
if (section == NULL) {
return NULL;
} else {
info->type = section->header().sh_type;
info->flags = section->header().sh_flags;
info->addr = section->header().sh_addr;
info->offset = section->header().sh_offset;
info->size = section->header().sh_size;
info->link = section->header().sh_link;
info->info = section->header().sh_info;
info->addralign = section->header().sh_addralign;
info->entsize = section->header().sh_entsize;
return section->contents();
}
}
}
return NULL;
}
// p_vaddr of the first PT_LOAD segment (if any), or 0 if no PT_LOAD
// segments are present. This is the address an ELF image was linked
// (by static linker) to be loaded at. Usually (but not always) 0 for
// shared libraries and position-independent executables.
uint64 VaddrOfFirstLoadSegment() const {
// Relocatable objects (of type ET_REL) do not have LOAD segments.
if (header_.e_type == ET_REL) {
return 0;
}
for (int i = 0; i < GetNumProgramHeaders(); ++i) {
if (program_headers_[i].p_type == PT_LOAD) {
return program_headers_[i].p_vaddr;
}
}
LOG(ERROR) << "Could not find LOAD from program header: " << path_;
return 0;
}
// According to the LSB ("ELF special sections"), sections with debug
// info are prefixed by ".debug". The names are not specified, but they
// look like ".debug_line", ".debug_info", etc.
bool HasDebugSections() {
// Debug sections are likely to be near the end, so reverse the
// direction of iteration.
for (int k = GetNumSections() - 1; k >= 0; --k) {
const char *name = GetSectionName(section_headers_[k].sh_name);
if (strncmp(name, ".debug", strlen(".debug")) == 0)
return true;
}
return false;
}
bool IsDynamicSharedObject() const {
return header_.e_type == ET_DYN;
}
private:
typedef vector<pair<uint64, const typename ElfArch::Sym *> > AddrToSymMap;
static bool AddrToSymSorter(const typename AddrToSymMap::value_type& lhs,
const typename AddrToSymMap::value_type& rhs) {
return lhs.first < rhs.first;
}
static bool AddrToSymEquals(const typename AddrToSymMap::value_type& lhs,
const typename AddrToSymMap::value_type& rhs) {
return lhs.first == rhs.first;
}
// Does this ELF file have too many sections to fit in the program header?
bool HasManySections() const {
return header_.e_shnum == SHN_UNDEF;
}
// Return the number of program headers.
int GetNumProgramHeaders() const {
if (HasManySections() && header_.e_phnum == 0xffff &&
first_section_header_.sh_info != 0)
return first_section_header_.sh_info;
return header_.e_phnum;
}
// Return the number of sections.
int GetNumSections() const {
if (HasManySections())
return first_section_header_.sh_size;
return header_.e_shnum;
}
// Return the index of the string table.
int GetStringTableIndex() const {
if (HasManySections()) {
if (header_.e_shstrndx == 0xffff)
return first_section_header_.sh_link;
else if (header_.e_shstrndx >= GetNumSections())
return 0;
}
return header_.e_shstrndx;
}
// Given an offset into the section header string table, return the
// section name.
const char *GetSectionName(typename ElfArch::Word sh_name) {
const ElfSectionReader<ElfArch> *shstrtab =
GetSection(GetStringTableIndex());
if (shstrtab != NULL) {
CHECK_GE(shstrtab->section_size(), sh_name);
return shstrtab->GetOffset(sh_name);
}
return NULL;
}
// Return an ElfSectionReader for the given section. The reader will
// be freed when this object is destroyed.
const ElfSectionReader<ElfArch> *GetSection(int num) {
CHECK_LT(num, GetNumSections());
const char *name;
// Hard-coding the name for the section-name string table prevents
// infinite recursion.
if (num == GetStringTableIndex())
name = ".shstrtab";
else
name = GetSectionNameByIndex(num);
ElfSectionReader<ElfArch> *& reader = sections_[num];
if (reader == NULL)
reader = new ElfSectionReader<ElfArch>(path_, fd_,
section_headers_[num]);
return reader;
}
// Parse out the overall header information from the file and assert
// that it looks sane. This contains information like the magic
// number and target architecture.
bool ParseHeaders(int fd, const string &path) {
// Read in the global ELF header.
if (pread(fd, &header_, sizeof(header_), 0) != sizeof(header_)) {
LOG(ERROR) << "Could not read ELF header: " << path;
return false;
}
// Must be an executable, dynamic shared object or relocatable object
if (header_.e_type != ET_EXEC &&
header_.e_type != ET_DYN &&
header_.e_type != ET_REL) {
LOG(ERROR) << "Not an executable, shared object or relocatable object "
"file: " << path;
return false;
}
// Need a section header.
if (header_.e_shoff == 0) {
LOG(ERROR) << "No section header: " << path;
return false;
}
if (header_.e_shnum == SHN_UNDEF) {
// The number of sections in the program header is only a 16-bit value. In
// the event of overflow (greater than SHN_LORESERVE sections), e_shnum
// will read SHN_UNDEF and the true number of section header table entries
// is found in the sh_size field of the first section header.
// See: http://www.sco.com/developers/gabi/2003-12-17/ch4.sheader.html
if (pread(fd, &first_section_header_, sizeof(first_section_header_),
header_.e_shoff) != sizeof(first_section_header_)) {
LOG(ERROR) << "Failed to read first section header: " << path;
return false;
}
}
// Dynamically allocate enough space to store the section headers
// and read them out of the file.
const int section_headers_size =
GetNumSections() * sizeof(*section_headers_);
    section_headers_ = new typename ElfArch::Shdr[GetNumSections()];
if (pread(fd, section_headers_, section_headers_size, header_.e_shoff) !=
section_headers_size) {
LOG(ERROR) << "Could not read section headers: " << path;
return false;
}
// Dynamically allocate enough space to store the program headers
// and read them out of the file.
const int program_headers_size =
GetNumProgramHeaders() * sizeof(*program_headers_);
program_headers_ = new typename ElfArch::Phdr[GetNumProgramHeaders()];
if (pread(fd, program_headers_, program_headers_size, header_.e_phoff) !=
program_headers_size) {
LOG(ERROR) << "Could not read program headers: " << path
<< " Continue anyway";
}
// Presize the sections array for efficiency.
sections_.resize(GetNumSections(), NULL);
return true;
}
void AdjustSymbolValue(typename ElfArch::Sym* sym) {
switch (header_.e_machine) {
case EM_ARM:
// For ARM architecture, if the LSB of the function symbol offset is set,
// it indicates a Thumb function. This bit should not be taken literally.
// Clear it.
if (ElfArch::Type(sym) == STT_FUNC)
sym->st_value = AdjustARMThumbSymbolValue(sym->st_value);
break;
case EM_386:
// No adjustment needed for Intel x86 architecture. However, explicitly
// define this case as we use it quite often.
break;
case EM_PPC:
// PowerPC architecture may need adjustment in the future.
break;
default:
break;
}
}
friend class SymbolIterator<ElfArch>;
// The file we're reading.
const string path_;
// Open file descriptor for path_. Not owned by this object.
const int fd_;
// The global header of the ELF file.
typename ElfArch::Ehdr header_;
// The header of the first section. This may be used to supplement the ELF
// file header.
typename ElfArch::Shdr first_section_header_;
// Array of GetNumSections() section headers, allocated when we read
// in the global header.
typename ElfArch::Shdr *section_headers_;
// Array of GetNumProgramHeaders() program headers, allocated when we read
// in the global header.
typename ElfArch::Phdr *program_headers_;
// An array of pointers to ElfSectionReaders. Sections are
// mmaped as they're needed and not released until this object is
// destroyed.
vector<ElfSectionReader<ElfArch>*> sections_;
// True if this is a .dwp file.
bool is_dwp_;
DISALLOW_EVIL_CONSTRUCTORS(ElfReaderImpl);
};
ElfReader::ElfReader(const string &path)
: path_(path), fd_(-1), impl32_(NULL), impl64_(NULL) {
// linux 2.6.XX kernel can show deleted files like this:
// /var/run/nscd/dbYLJYaE (deleted)
// and the kernel-supplied vdso and vsyscall mappings like this:
// [vdso]
// [vsyscall]
if (MyHasSuffixString(path, " (deleted)"))
return;
if (path == "[vdso]")
return;
if (path == "[vsyscall]")
return;
fd_ = open(path.c_str(), O_RDONLY);
if (fd_ == -1) {
// Not ERROR, since this gets called with things like "[heap]".
PLOG(INFO) << "Could not open " << path_;
}
}
ElfReader::~ElfReader() {
if (fd_ != -1)
close(fd_);
if (impl32_ != NULL)
delete impl32_;
if (impl64_ != NULL)
delete impl64_;
}
// The only word-size specific part of this file is IsNativeElfFile().
#if __WORDSIZE == 32
#define NATIVE_ELF_ARCH Elf32
#elif __WORDSIZE == 64
#define NATIVE_ELF_ARCH Elf64
#else
#error "Invalid word size"
#endif
template <typename ElfArch>
static bool IsElfFile(const int fd, const string &path) {
if (fd < 0)
return false;
if (!ElfReaderImpl<ElfArch>::IsArchElfFile(fd, NULL)) {
// No error message here. IsElfFile gets called many times.
return false;
}
return true;
}
bool ElfReader::IsNativeElfFile() const {
return IsElfFile<NATIVE_ELF_ARCH>(fd_, path_);
}
bool ElfReader::IsElf32File() const {
return IsElfFile<Elf32>(fd_, path_);
}
bool ElfReader::IsElf64File() const {
return IsElfFile<Elf64>(fd_, path_);
}
void ElfReader::VisitSymbols(ElfReader::SymbolSink *sink) {
VisitSymbols(sink, -1, -1);
}
void ElfReader::VisitSymbols(ElfReader::SymbolSink *sink,
int symbol_binding,
int symbol_type) {
VisitSymbols(sink, symbol_binding, symbol_type, false);
}
void ElfReader::VisitSymbols(ElfReader::SymbolSink *sink,
int symbol_binding,
int symbol_type,
bool get_raw_symbol_values) {
if (IsElf32File()) {
GetImpl32()->VisitSymbols(SHT_SYMTAB, sink, symbol_binding, symbol_type,
get_raw_symbol_values);
GetImpl32()->VisitSymbols(SHT_DYNSYM, sink, symbol_binding, symbol_type,
get_raw_symbol_values);
} else if (IsElf64File()) {
GetImpl64()->VisitSymbols(SHT_SYMTAB, sink, symbol_binding, symbol_type,
get_raw_symbol_values);
GetImpl64()->VisitSymbols(SHT_DYNSYM, sink, symbol_binding, symbol_type,
get_raw_symbol_values);
}
}
uint64 ElfReader::VaddrOfFirstLoadSegment() {
if (IsElf32File()) {
return GetImpl32()->VaddrOfFirstLoadSegment();
} else if (IsElf64File()) {
return GetImpl64()->VaddrOfFirstLoadSegment();
} else {
LOG(ERROR) << "not an elf binary: " << path_;
return 0;
}
}
int ElfReader::GetSectionIndexByType(uint32_t type, int start_index) {
if (IsElf32File()) {
return GetImpl32()->GetSectionIndexByType(type, start_index);
} else if (IsElf64File()) {
return GetImpl64()->GetSectionIndexByType(type, start_index);
} else {
LOG(ERROR) << "not an elf binary: " << path_;
return -1;
}
}
const char *ElfReader::GetSectionName(int shndx) {
if (IsElf32File()) {
return GetImpl32()->GetSectionNameByIndex(shndx);
} else if (IsElf64File()) {
return GetImpl64()->GetSectionNameByIndex(shndx);
} else {
LOG(ERROR) << "not an elf binary: " << path_;
return NULL;
}
}
const char *ElfReader::GetSectionByIndex(int shndx, size_t *size) {
if (IsElf32File()) {
return GetImpl32()->GetSectionContentsByIndex(shndx, size);
} else if (IsElf64File()) {
return GetImpl64()->GetSectionContentsByIndex(shndx, size);
} else {
LOG(ERROR) << "not an elf binary: " << path_;
return NULL;
}
}
const char *ElfReader::GetSectionByName(const string §ion_name,
size_t *size) {
if (IsElf32File()) {
return GetImpl32()->GetSectionContentsByName(section_name, size);
} else if (IsElf64File()) {
return GetImpl64()->GetSectionContentsByName(section_name, size);
} else {
LOG(ERROR) << "not an elf binary: " << path_;
return NULL;
}
}
const char *ElfReader::GetSectionInfoByName(const string §ion_name,
SectionInfo *info) {
if (IsElf32File()) {
return GetImpl32()->GetSectionInfoByName(section_name, info);
} else if (IsElf64File()) {
return GetImpl64()->GetSectionInfoByName(section_name, info);
} else {
LOG(ERROR) << "not an elf binary: " << path_;
return NULL;
}
}
bool ElfReader::SectionNamesMatch(const string &name, const string &sh_name) {
if ((name.find(".debug_", 0) == 0) && (sh_name.find(".zdebug_", 0) == 0)) {
const string name_suffix(name, strlen(".debug_"));
const string sh_name_suffix(sh_name, strlen(".zdebug_"));
return name_suffix == sh_name_suffix;
}
return name == sh_name;
}
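// For illustration (derived from the matching rule above, not from the
// original sources):
//   SectionNamesMatch(".debug_info", ".zdebug_info") -> true  (compressed twin)
//   SectionNamesMatch(".debug_info", ".debug_info")  -> true  (exact match)
//   SectionNamesMatch(".text", ".rodata")            -> false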
string ElfReader::GetBuildId() {
std::vector<string> build_ids;
// The format of the .note section is as follows (see "ELF file Format"):
typedef struct BuildIdNoteSection {
uint32 namesz;
uint32 descsz;
uint32 type;
char gnu_name[4];
uint8 id[];
} BuildIdNoteSection;
for (int nindex = GetSectionIndexByType(SHT_NOTE, 0); nindex >= 0;
nindex = GetSectionIndexByType(SHT_NOTE, nindex + 1)) {
size_t size;
const BuildIdNoteSection *id_note =
reinterpret_cast<const BuildIdNoteSection *>(
GetSectionByIndex(nindex, &size));
if (id_note != nullptr && size >= sizeof(*id_note) &&
id_note->type == NT_GNU_BUILD_ID &&
memcmp(id_note->gnu_name, "GNU\0", 4) == 0 && id_note->descsz <= 20) {
      // Format the build ID bytes as lowercase hex; a 20-byte SHA-1 note
      // yields the usual 40-character string.
      // TODO(dehao): remove padding once quipper is fixed (see b/21597512)
      string build_id((id_note->descsz << 1), '0');
const char hexdigits[] = "0123456789abcdef";
for (int i = 0; i < id_note->descsz; i++) {
build_id[2 * i] = hexdigits[(id_note->id[i]) >> 4];
build_id[2 * i + 1] = hexdigits[id_note->id[i] & 0x0f];
}
build_ids.push_back(build_id);
}
}
switch (build_ids.size()) {
case 0:
return "";
case 1:
return build_ids[0];
default:
// Repeated builds-ids. Complain and ignore them.
LOG(ERROR) << "Ignoring multiple GNU_BUILD_ID notes";
return "";
}
}
bool ElfReader::IsDynamicSharedObject() {
if (IsElf32File()) {
return GetImpl32()->IsDynamicSharedObject();
} else if (IsElf64File()) {
return GetImpl64()->IsDynamicSharedObject();
} else {
LOG(ERROR) << "not an elf binary: " << path_;
return false;
}
}
ElfReaderImpl<Elf32> *ElfReader::GetImpl32() {
if (impl32_ == NULL) {
impl32_ = new ElfReaderImpl<Elf32>(path_, fd_);
}
return impl32_;
}
ElfReaderImpl<Elf64> *ElfReader::GetImpl64() {
if (impl64_ == NULL) {
impl64_ = new ElfReaderImpl<Elf64>(path_, fd_);
}
return impl64_;
}
// Return true if file is an ELF binary of ElfArch, with unstripped
// debug info (debug_only=true) or symbol table (debug_only=false).
// Otherwise, return false.
template <typename ElfArch>
static bool IsNonStrippedELFBinaryImpl(const string &path, const int fd,
bool debug_only) {
if (!ElfReaderImpl<ElfArch>::IsArchElfFile(fd, NULL)) return false;
ElfReaderImpl<ElfArch> elf_reader(path, fd);
return debug_only ?
elf_reader.HasDebugSections()
: (elf_reader.GetSectionByType(SHT_SYMTAB) != NULL);
}
// Helper for the IsNon[Debug]StrippedELFBinary functions.
static bool IsNonStrippedELFBinaryHelper(const string &path,
bool debug_only) {
const int fd = open(path.c_str(), O_RDONLY);
if (fd == -1) {
return false;
}
if (IsNonStrippedELFBinaryImpl<Elf32>(path, fd, debug_only) ||
IsNonStrippedELFBinaryImpl<Elf64>(path, fd, debug_only)) {
close(fd);
return true;
}
close(fd);
return false;
}
bool ElfReader::IsNonStrippedELFBinary(const string &path) {
return IsNonStrippedELFBinaryHelper(path, false);
}
bool ElfReader::IsNonDebugStrippedELFBinary(const string &path) {
return IsNonStrippedELFBinaryHelper(path, true);
}
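// Illustrative usage sketch (not part of the original file): shows how a
// caller might query a binary through the public ElfReader API defined above.
// The function name and the use of LOG(INFO) are assumptions added only for
// illustration.
void ExampleDescribeElfBinary(const string &path) {
  ElfReader reader(path);
  if (!reader.IsElf32File() && !reader.IsElf64File()) {
    LOG(INFO) << "not an elf binary: " << path;
    return;
  }
  LOG(INFO) << path << " build-id: " << reader.GetBuildId();
  LOG(INFO) << path << " is DSO: " << reader.IsDynamicSharedObject();
  LOG(INFO) << path << " first LOAD vaddr: "
            << reader.VaddrOfFirstLoadSegment();
}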
} // namespace devtools_crosstool_autofdo
| 12,207 |
912 | '''
------------------------------------------------------------------------------
Copyright (c) 2015 Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
------------------------------------------------------------------------------
'''
from __future__ import unicode_literals
from .session_base import SessionBase
from time import time
class Session(SessionBase):
def __init__(self,
token_type,
expires_in,
scope_string,
access_token,
client_id,
auth_server_url,
redirect_uri,
refresh_token=None,
client_secret=None):
self.token_type = token_type
self._expires_at = time() + int(expires_in)
self.scope = scope_string.split(" ")
self.access_token = access_token
self.client_id = client_id
self.auth_server_url = auth_server_url
self.redirect_uri = redirect_uri
self.refresh_token = refresh_token
self.client_secret = client_secret
def is_expired(self):
"""Whether or not the session has expired
Returns:
bool: True if the session has expired, otherwise false
"""
# Add a 10 second buffer in case the token is just about to expire
        return self._expires_at < time() + 10
def refresh_session(self, expires_in, scope_string, access_token, refresh_token):
self._expires_at = time() + int(expires_in)
self.scope = scope_string.split(" ")
self.access_token = access_token
self.refresh_token = refresh_token
def save_session(self, **save_session_kwargs):
"""Save the current session.
IMPORTANT: This implementation should only be used for debugging.
For real applications, the Session object should be subclassed and
both save_session() and load_session() should be overwritten using
the client system's correct mechanism (keychain, database, etc.).
Remember, the access_token should be treated the same as a password.
Args:
            save_session_kwargs (dict): To be used by the implementation
of save_session, however save_session wants to use them. The
default implementation (this one) takes a relative or absolute
file path for pickle save location, under the name "path"
"""
path = "session.pickle"
if "path" in save_session_kwargs:
path = save_session_kwargs["path"]
with open(path, "wb") as session_file:
import pickle
# pickle.HIGHEST_PROTOCOL is binary format. Good perf.
pickle.dump(self, session_file, pickle.HIGHEST_PROTOCOL)
@staticmethod
def load_session(**load_session_kwargs):
"""Save the current session.
IMPORTANT: This implementation should only be used for debugging.
For real applications, the Session object should be subclassed and
both save_session() and load_session() should be overwritten using
the client system's correct mechanism (keychain, database, etc.).
Remember, the access_token should be treated the same as a password.
Args:
load_session_kwargs (dict): To be used by implementation
of load_session, however load_session wants to use them. The
default implementation (this one) takes a relative or absolute
file path for pickle save location, under the name "path"
Returns:
:class:`Session`: The loaded session
"""
path = "session.pickle"
if "path" in load_session_kwargs:
path = load_session_kwargs["path"]
with open(path, "rb") as session_file:
import pickle
return pickle.load(session_file)
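# Illustrative sketch (not part of the original SDK): round-trips a session
# through the default pickle-based save_session()/load_session() helpers.
# The token values and the demo_session.pickle path are placeholders only.
if __name__ == "__main__":
    demo = Session(token_type="bearer",
                   expires_in="3600",
                   scope_string="onedrive.readwrite offline_access",
                   access_token="placeholder-access-token",
                   client_id="placeholder-client-id",
                   auth_server_url="https://login.live.com/oauth20_token.srf",
                   redirect_uri="https://localhost:8080/",
                   refresh_token="placeholder-refresh-token")
    demo.save_session(path="demo_session.pickle")
    restored = Session.load_session(path="demo_session.pickle")
    assert not restored.is_expired()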
| 1,781 |
527 | <reponame>dhanainme/multi-model-server
#!/usr/bin/env python3
""" Customised system and mms process metrics for monitoring and pass-fail criteria in taurus"""
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
# http://www.apache.org/licenses/LICENSE-2.0
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
from enum import Enum
from statistics import mean
import psutil
from psutil import NoSuchProcess, ZombieProcess
class ProcessType(Enum):
""" Type of MMS processes to compute metrics on """
FRONTEND = 1
WORKER = 2
ALL = 3
operators = {
'sum': sum,
'avg': mean,
'min': min,
'max': max
}
process_metrics = {
# cpu
'cpu_percent': lambda p: p.get('cpu_percent', 0),
'cpu_user_time': lambda p: getattr(p.get('cpu_times', {}), 'user', 0),
'cpu_system_time': lambda p: getattr(p.get('cpu_times', {}), 'system', 0),
'cpu_iowait_time': lambda p: getattr(p.get('cpu_times', {}), 'iowait', 0),
# memory
'memory_percent': lambda p: p.get('memory_percent', 0),
'memory_rss': lambda p: getattr(p.get('memory_info', {}), 'rss', 0),
'memory_vms': lambda p: getattr(p.get('memory_info', {}), 'vms', 0),
# io
'io_read_count': lambda p: getattr(p.get('io_counters', {}), 'read_count', 0),
'io_write_count': lambda p: getattr(p.get('io_counters', {}), 'write_count', 0),
'io_read_bytes': lambda p: getattr(p.get('io_counters', {}), 'read_bytes', 0),
'io_write_bytes': lambda p: getattr(p.get('io_counters', {}), 'write_bytes', 0),
'file_descriptors': lambda p: p.get('num_fds', 0),
# processes
'threads': lambda p: p.get('num_threads', 0)
}
system_metrics = {
'system_disk_used': None,
'system_memory_percent': None,
'system_read_count': None,
'system_write_count': None,
'system_read_bytes': None,
'system_write_bytes': None,
}
misc_metrics = {
'total_processes': None,
'total_workers': None,
'orphans': None
}
AVAILABLE_METRICS = list(system_metrics) + list(misc_metrics)
WORKER_NAME = 'model_service_worker.py'
for metric in list(process_metrics):
for ptype in list(ProcessType):
if ptype == ProcessType.WORKER:
PNAME = 'workers'
for op in list(operators):
AVAILABLE_METRICS.append('{}_{}_{}'.format(op, PNAME, metric))
elif ptype == ProcessType.FRONTEND:
PNAME = 'frontend'
AVAILABLE_METRICS.append('{}_{}'.format(PNAME, metric))
else:
PNAME = 'all'
for op in list(operators):
AVAILABLE_METRICS.append('{}_{}_{}'.format(op, PNAME, metric))
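# For illustration, the loop above yields metric names such as:
#   'frontend_cpu_percent'        (single frontend process)
#   'avg_workers_memory_rss'      (aggregated over worker processes)
#   'max_all_threads'             (aggregated over frontend + workers)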
children = set()
def get_metrics(server_process, child_processes, logger):
""" Get Server processes specific metrics
"""
result = {}
children.update(child_processes)
logger.debug("children : {0}".format(",".join([str(c.pid) for c in children])))
def update_metric(metric_name, proc_type, stats):
stats = list(filter(lambda x: isinstance(x, (float, int)), stats))
stats = stats if len(stats) else [0]
if proc_type == ProcessType.WORKER:
proc_name = 'workers'
elif proc_type == ProcessType.FRONTEND:
proc_name = 'frontend'
result[proc_name + '_' + metric_name] = stats[0]
return
else:
proc_name = 'all'
for op_name in operators:
result['{}_{}_{}'.format(op_name, proc_name, metric_name)] = operators[op_name](stats)
processes_stats = []
reclaimed_pids = []
try:
# as_dict() gets all stats in one shot
processes_stats.append({'type': ProcessType.FRONTEND, 'stats': server_process.as_dict()})
    except (NoSuchProcess, ZombieProcess):  # frontend process may have gone away
pass
for child in children:
try:
child_cmdline = child.cmdline()
if psutil.pid_exists(child.pid) and len(child_cmdline) >= 2 and WORKER_NAME in child_cmdline[1]:
processes_stats.append({'type': ProcessType.WORKER, 'stats': child.as_dict()})
else:
reclaimed_pids.append(child)
logger.debug('child {0} no longer available'.format(child.pid))
except (NoSuchProcess, ZombieProcess):
reclaimed_pids.append(child)
logger.debug('child {0} no longer available'.format(child.pid))
for p in reclaimed_pids:
children.remove(p)
### PROCESS METRICS ###
worker_stats = list(map(lambda x: x['stats'], \
filter(lambda x: x['type'] == ProcessType.WORKER, processes_stats)))
server_stats = list(map(lambda x: x['stats'], \
filter(lambda x: x['type'] == ProcessType.FRONTEND, processes_stats)))
all_stats = list(map(lambda x: x['stats'], processes_stats))
for k in process_metrics:
update_metric(k, ProcessType.WORKER, list(map(process_metrics[k], worker_stats)))
update_metric(k, ProcessType.ALL, list(map(process_metrics[k], all_stats)))
update_metric(k, ProcessType.FRONTEND, list(map(process_metrics[k], server_stats)))
# Total processes
result['total_processes'] = len(worker_stats) + 1
result['total_workers'] = max(len(worker_stats) - 1, 0)
result['orphans'] = len(list(filter(lambda p: p['ppid'] == 1, worker_stats)))
### SYSTEM METRICS ###
result['system_disk_used'] = psutil.disk_usage('/').used
result['system_memory_percent'] = psutil.virtual_memory().percent
system_disk_io_counters = psutil.disk_io_counters()
result['system_read_count'] = system_disk_io_counters.read_count
result['system_write_count'] = system_disk_io_counters.write_count
result['system_read_bytes'] = system_disk_io_counters.read_bytes
result['system_write_bytes'] = system_disk_io_counters.write_bytes
return result
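# Illustrative sketch (not part of the original module): how a monitor might
# call get_metrics() once. Using the current process as the "server" and the
# module logger are assumptions made only for this example.
def _example_poll_once():
    import logging
    server = psutil.Process()  # stand-in for the MMS frontend process
    workers = set(server.children(recursive=True))
    metrics = get_metrics(server, workers, logging.getLogger(__name__))
    return metrics['total_processes'], metrics['system_memory_percent']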
| 2,527 |
852 | import FWCore.ParameterSet.Config as cms
pythiaDefaultBlock = cms.PSet(
pythiaDefault = cms.vstring('PMAS(5,1)=4.8 ! b quark mass',
'PMAS(6,1)=172.3 ! t quark mass')
)
| 85 |
551 | @class MIMEAddress, Source;
@interface Package : NSObject
@property (nonatomic, retain, readonly) NSString *id;
@property (nonatomic, retain, readonly) NSString *name;
@property (nonatomic, retain, readonly) MIMEAddress *author;
@property (nonatomic, retain, readonly) NSString *installed;
@property (nonatomic, retain, readonly) Source *source;
@property (nonatomic, retain, readonly) NSArray *relations;
@property (nonatomic, retain, readonly) NSString *homepage;
@property (readonly) BOOL isCommercial;
@property (nonatomic, retain, readonly) NSString *latest;
@property (nonatomic, retain, readonly) NSString *section;
@property (nonatomic, retain, readonly) NSString *shortDescription;
@end
| 207 |
11,351 | package com.netflix.discovery.util;
import com.netflix.appinfo.AmazonInfo;
import com.netflix.appinfo.DataCenterInfo;
import com.netflix.appinfo.InstanceInfo;
import org.junit.Assert;
import org.junit.Test;
/**
* @author <NAME>
*/
public class EurekaUtilsTest {
@Test
public void testIsInEc2() {
InstanceInfo instanceInfo1 = new InstanceInfo.Builder(InstanceInfoGenerator.takeOne())
.setDataCenterInfo(new DataCenterInfo() {
@Override
public Name getName() {
return Name.MyOwn;
}
})
.build();
Assert.assertFalse(EurekaUtils.isInEc2(instanceInfo1));
InstanceInfo instanceInfo2 = InstanceInfoGenerator.takeOne();
Assert.assertTrue(EurekaUtils.isInEc2(instanceInfo2));
}
@Test
public void testIsInVpc() {
InstanceInfo instanceInfo1 = new InstanceInfo.Builder(InstanceInfoGenerator.takeOne())
.setDataCenterInfo(new DataCenterInfo() {
@Override
public Name getName() {
return Name.MyOwn;
}
})
.build();
Assert.assertFalse(EurekaUtils.isInVpc(instanceInfo1));
InstanceInfo instanceInfo2 = InstanceInfoGenerator.takeOne();
Assert.assertFalse(EurekaUtils.isInVpc(instanceInfo2));
InstanceInfo instanceInfo3 = InstanceInfoGenerator.takeOne();
((AmazonInfo) instanceInfo3.getDataCenterInfo()).getMetadata()
.put(AmazonInfo.MetaDataKey.vpcId.getName(), "vpc-123456");
Assert.assertTrue(EurekaUtils.isInVpc(instanceInfo3));
}
}
| 809 |
2,329 | /*
* Copyright 2010-2012 VMware and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springsource.loaded.ri;
import java.lang.annotation.Annotation;
import java.lang.annotation.Inherited;
import java.lang.ref.WeakReference;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.springsource.loaded.C;
import org.springsource.loaded.Constants;
import org.springsource.loaded.CurrentLiveVersion;
import org.springsource.loaded.FieldMember;
import org.springsource.loaded.GlobalConfiguration;
import org.springsource.loaded.MethodMember;
import org.springsource.loaded.ReloadException;
import org.springsource.loaded.ReloadableType;
import org.springsource.loaded.TypeDescriptor;
import org.springsource.loaded.TypeRegistry;
import org.springsource.loaded.Utils;
import org.springsource.loaded.infra.UsedByGeneratedCode;
import org.springsource.loaded.jvm.JVM;
import org.springsource.loaded.support.ConcurrentWeakIdentityHashMap;
/**
 * Reflective calls found in the bytecode are rewritten to target the reflective interceptor. Intercepting the
 * calls means we can delegate to the SpringLoaded infrastructure.
*
* @author <NAME>
* @author <NAME>
* @since 0.5.0
*/
public class ReflectiveInterceptor {
public static Logger log = Logger.getLogger(ReflectiveInterceptor.class.getName());
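	// Illustrative note about how this class is reached (an assumption, not taken
	// verbatim from the SpringLoaded sources): a call site such as
	//
	//     Method m = Foo.class.getDeclaredMethod("bar");
	//
	// is rewritten by the weaver into something roughly equivalent to
	//
	//     Method m = ReflectiveInterceptor.jlClassGetDeclaredMethod(Foo.class, "bar");
	//
	// so that members added, changed or deleted by a reload are reflected correctly.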
private static Map<Class<?>, WeakReference<ReloadableType>> classToRType = null;
static {
boolean synchronize = false;
try {
String prop = System.getProperty("springloaded.synchronize", "false");
if (prop.equalsIgnoreCase("true")) {
synchronize = true;
}
}
catch (Throwable t) {
// likely security manager
}
if (synchronize) {
classToRType = Collections.synchronizedMap(new WeakHashMap<Class<?>, WeakReference<ReloadableType>>());
}
else {
classToRType = new ConcurrentWeakIdentityHashMap<Class<?>, WeakReference<ReloadableType>>();
// classToRType = new WeakHashMap<Class<?>, WeakReference<ReloadableType>>();
}
}
@UsedByGeneratedCode
public static boolean jlosHasStaticInitializer(Class<?> clazz) {
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Exception tells the caller to use the 'old way' to determine if there is a static initializer
throw new IllegalStateException();
}
return rtype.hasStaticInitializer();
}
/*
	 * Implementation of java.lang.Class.getDeclaredMethod(String name, Class... params).
*/
@UsedByGeneratedCode
public static Method jlClassGetDeclaredMethod(Class<?> clazz, String name, Class<?>... params)
throws SecurityException,
NoSuchMethodException {
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable...
return clazz.getDeclaredMethod(name, params);
}
else {
// Reloadable
MethodProvider methods = MethodProvider.create(rtype);
Invoker method = methods.getDeclaredMethod(name, params);
if (method == null) {
throw Exceptions.noSuchMethodException(clazz, name, params);
}
else {
return method.createJavaMethod();
}
}
}
/*
	 * Implementation of java.lang.Class.getMethod(String name, Class... params).
*/
@UsedByGeneratedCode
public static Method jlClassGetMethod(Class<?> clazz, String name, Class<?>... params) throws SecurityException,
NoSuchMethodException {
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable...
return clazz.getMethod(name, params);
}
else {
MethodProvider methods = MethodProvider.create(rtype);
Invoker method = methods.getMethod(name, params);
if (method == null) {
throw Exceptions.noSuchMethodException(clazz, name, params);
}
else {
return method.createJavaMethod();
}
}
}
public static Method[] jlClassGetDeclaredMethods(Class<?> clazz) {
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable...
return clazz.getDeclaredMethods();
}
else {
MethodProvider methods = MethodProvider.create(rtype);
List<Invoker> invokers = methods.getDeclaredMethods();
Method[] javaMethods = new Method[invokers.size()];
for (int i = 0; i < javaMethods.length; i++) {
javaMethods[i] = invokers.get(i).createJavaMethod();
}
return javaMethods;
}
}
public static Method[] jlClassGetMethods(Class<?> clazz) {
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable...
return clazz.getMethods();
}
else {
MethodProvider methods = MethodProvider.create(rtype);
Collection<Invoker> invokers = methods.getMethods();
Method[] javaMethods = new Method[invokers.size()];
int i = 0;
for (Invoker invoker : invokers) {
javaMethods[i++] = invoker.createJavaMethod();
}
return javaMethods;
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
static String toParamString(Class<?>[] params) {
if (params == null || params.length == 0) {
return "()";
}
StringBuilder s = new StringBuilder();
s.append('(');
for (int i = 0, max = params.length; i < max; i++) {
if (i > 0) {
s.append(", ");
}
if (params[i] == null) {
s.append("null");
}
else {
s.append(params[i].getName());
}
}
s.append(')');
return s.toString();
}
private static int depth = 4;
/*
	 * Get the Class that declares the method which called the interceptor method that called this method.
*/
@SuppressWarnings("deprecation")
public static Class<?> getCallerClass() {
//0 = sun.reflect.Reflection.getCallerClass
//1 = this method's frame
//2 = caller of 'getCallerClass' = asAccesibleMethod
//3 = caller of 'asAccesibleMethod' = jlrInvoke
//4 = caller we are interested in...
		// In JDK 7u25 there is an extra frame inserted:
// "This also fixes a regression introduced in 7u25 in which
// getCallerClass(int) is now a Java method that adds an additional frame
// that wasn't taken into account." in https://permalink.gmane.org/gmane.comp.java.openjdk.jdk7u.devel/6573
Class<?> caller = sun.reflect.Reflection.getCallerClass(depth);
if (caller == ReflectiveInterceptor.class) {
// If this is true we have that extra frame on the stack
depth = 5;
caller = sun.reflect.Reflection.getCallerClass(depth);
}
String callerClassName = caller.getName();
Matcher matcher = Constants.executorClassNamePattern.matcher(callerClassName);
if (matcher.find()) {
// Complication... the caller may in fact be an executor method...
// in this case the caller will be an executor class.
ClassLoader loader = caller.getClassLoader();
try {
return Class.forName(callerClassName.substring(0, matcher.start()), false, loader);
}
catch (ClassNotFoundException e) {
//Supposedly it wasn't an executor class after all...
log.log(Level.INFO, "Potential trouble determining caller of reflective method", e);
}
}
return caller;
}
/**
* Called to satisfy an invocation of java.lang.Class.getDeclaredAnnotations().
*
* @param clazz the class upon which the original call was being invoked
* @return array of annotations on the class
*/
public static Annotation[] jlClassGetDeclaredAnnotations(Class<?> clazz) {
if (TypeRegistry.nothingReloaded) {
return clazz.getDeclaredAnnotations();
}
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(clazz);
if (rtype == null) {
return clazz.getDeclaredAnnotations();
}
CurrentLiveVersion clv = rtype.getLiveVersion();
return clv.getExecutorClass().getDeclaredAnnotations();
}
/*
	 * Called to satisfy an invocation of java.lang.Class.getAnnotations().
*
* @param clazz the class upon which the original call was being invoked
*/
public static Annotation[] jlClassGetAnnotations(Class<?> clazz) {
if (TypeRegistry.nothingReloaded) {
return clazz.getAnnotations();
}
ReloadableType rtype = getRType(clazz);
		//Note: even if the class has not been reloaded, its superclass may have been and this may affect
// the inherited annotations, so we must *not* use 'getReloadableTypeIfHasBeenReloaded' above!
if (rtype == null) {
return clazz.getAnnotations();
}
Class<?> superClass = clazz.getSuperclass();
if (superClass == null) {
return jlClassGetDeclaredAnnotations(clazz); //Nothing to inherit so it's ok to call this
}
Map<Class<? extends Annotation>, Annotation> combinedAnnotations = new HashMap<Class<? extends Annotation>, Annotation>();
Annotation[] annotationsToAdd = jlClassGetAnnotations(superClass);
for (Annotation annotation : annotationsToAdd) {
if (isInheritable(annotation)) {
combinedAnnotations.put(annotation.annotationType(), annotation);
}
}
annotationsToAdd = jlClassGetDeclaredAnnotations(clazz);
for (Annotation annotation : annotationsToAdd) {
combinedAnnotations.put(annotation.annotationType(), annotation);
}
return combinedAnnotations.values().toArray(new Annotation[combinedAnnotations.size()]);
}
public static Annotation jlClassGetAnnotation(Class<?> clazz, Class<? extends Annotation> annoType) {
ReloadableType rtype = getRType(clazz);
		//Note: even if the class has not been reloaded, its superclass may have been and this may affect
// the inherited annotations, so we must *not* use 'getReloadableTypeIfHasBeenReloaded' above!
if (rtype == null) {
return clazz.getAnnotation(annoType);
}
if (annoType == null) {
throw new NullPointerException();
}
for (Annotation localAnnot : jlClassGetDeclaredAnnotations(clazz)) {
if (localAnnot.annotationType() == annoType) {
return localAnnot;
}
}
if (annoType.isAnnotationPresent(Inherited.class)) {
Class<?> superClass = clazz.getSuperclass();
if (superClass != null) {
return jlClassGetAnnotation(superClass, annoType);
}
}
return null;
}
public static boolean jlClassIsAnnotationPresent(Class<?> clazz, Class<? extends Annotation> annoType) {
ReloadableType rtype = getRType(clazz);
		//Note: even if the class has not been reloaded, its superclass may have been and this may affect
// the inherited annotations, so we must *not* use 'getReloadableTypeIfHasBeenReloaded' above!
if (rtype == null) {
return clazz.isAnnotationPresent(annoType);
}
return jlClassGetAnnotation(clazz, annoType) != null;
}
public static Constructor<?>[] jlClassGetDeclaredConstructors(Class<?> clazz) {
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Non reloadable type
Constructor<?>[] cs = clazz.getDeclaredConstructors();
return cs;
}
else if (!rtype.hasBeenReloaded()) {
// Reloadable but not yet reloaded
Constructor<?>[] cs = clazz.getDeclaredConstructors();
int i = 0;
for (Constructor<?> c : cs) {
if (isMetaConstructor(clazz, c)) {
// We must remove the 'special' constructor added by SpringLoaded
continue;
}
// SpringLoaded changes modifiers, so must fix them
fixModifier(rtype, c);
cs[i++] = c;
}
return Utils.arrayCopyOf(cs, i);
}
else {
CurrentLiveVersion liveVersion = rtype.getLiveVersion();
// Reloaded type
Constructor<?>[] clazzCs = null;
TypeDescriptor desc = rtype.getLatestTypeDescriptor();
MethodMember[] members = desc.getConstructors();
Constructor<?>[] cs = new Constructor<?>[members.length];
for (int i = 0; i < cs.length; i++) {
MethodMember m = members[i];
if (!liveVersion.hasConstructorChanged(m)) {
if (clazzCs == null) {
clazzCs = clazz.getDeclaredConstructors();
}
cs[i] = findConstructor(clazzCs, m);
// SpringLoaded changes modifiers, so must fix them
fixModifier(rtype, cs[i]);
}
else {
cs[i] = newConstructor(rtype, m);
}
}
return cs;
}
}
private static Constructor<?> findConstructor(Constructor<?>[] constructors, MethodMember searchFor) {
String paramDescriptor = searchFor.getDescriptor();
for (int i = 0, max = constructors.length; i < max; i++) {
String candidateDescriptor = Utils.toConstructorDescriptor(constructors[i].getParameterTypes());
if (candidateDescriptor.equals(paramDescriptor)) {
return constructors[i];
}
}
return null;
}
private static boolean isMetaConstructor(Class<?> clazz, Constructor<?> c) {
Class<?>[] params = c.getParameterTypes();
if (clazz.isEnum()) {
return params.length > 2 && params[2].getName().equals(Constants.magicDescriptorForGeneratedCtors);
}
else if (clazz.getSuperclass() != null && clazz.getSuperclass().getName().equals("groovy.lang.Closure")) {
return params.length > 2 && params[2].getName().equals(Constants.magicDescriptorForGeneratedCtors);
}
else {
return params.length > 0 && params[0].getName().equals(Constants.magicDescriptorForGeneratedCtors);
}
}
private static Constructor<?> newConstructor(ReloadableType rtype, MethodMember m) {
ClassLoader classLoader = rtype.getTypeRegistry().getClassLoader();
try {
return JVM.newConstructor(Utils.toClass(rtype), //declaring
Utils.toParamClasses(m.getDescriptor(), classLoader), // params
Utils.slashedNamesToClasses(m.getExceptions(), classLoader), //exceptions
m.getModifiers(), //modifiers
m.getGenericSignature() //signature
);
}
catch (ClassNotFoundException e) {
throw new IllegalStateException("Couldn't create j.l.Constructor for " + m, e);
}
}
private static void fixModifiers(ReloadableType rtype, Field[] fields) {
TypeDescriptor typeDesc = rtype.getLatestTypeDescriptor();
for (Field field : fields) {
fixModifier(typeDesc, field);
}
}
static void fixModifier(TypeDescriptor typeDesc, Field field) {
int mods = typeDesc.getField(field.getName()).getModifiers();
if (mods != field.getModifiers()) {
JVM.setFieldModifiers(field, mods);
}
}
protected static void fixModifier(ReloadableType rtype, Constructor<?> constructor) {
String desc = Type.getConstructorDescriptor(constructor);
MethodMember rCons = rtype.getCurrentConstructor(desc);
if (constructor.getModifiers() != rCons.getModifiers()) {
JVM.setConstructorModifiers(constructor, rCons.getModifiers());
}
}
public static Constructor<?>[] jlClassGetConstructors(Class<?> clazz) {
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
return clazz.getConstructors();
}
else {
Constructor<?>[] candidates = jlClassGetDeclaredConstructors(clazz);
//We need to throw away any non-public constructors.
List<Constructor<?>> keep = new ArrayList<Constructor<?>>(candidates.length);
for (Constructor<?> candidate : candidates) {
if (Modifier.isPublic(candidate.getModifiers())) {
keep.add(candidate);
}
}
return keep.toArray(new Constructor<?>[keep.size()]);
}
}
public static Constructor<?> jlClassGetDeclaredConstructor(Class<?> clazz, Class<?>... params)
throws SecurityException,
NoSuchMethodException {
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Non reloadable type
Constructor<?> c = clazz.getDeclaredConstructor(params);
return c;
}
else if (!rtype.hasBeenReloaded()) {
// Reloadable but not yet reloaded
Constructor<?> c = clazz.getDeclaredConstructor(params);
if (isMetaConstructor(clazz, c)) {
// not a real constructor !
throw Exceptions.noSuchConstructorException(clazz, params);
}
// SpringLoaded changes modifiers, so must fix them
fixModifier(rtype, c);
return c;
}
else {
// This would be the right thing to do but makes getDeclaredConstructors() very messy
CurrentLiveVersion clv = rtype.getLiveVersion();
boolean b = clv.hasConstructorChanged(Utils.toConstructorDescriptor(params));
if (!b) {
Constructor<?> c = clazz.getDeclaredConstructor(params);
if (isMetaConstructor(clazz, c)) {
// not a real constructor !
throw Exceptions.noSuchConstructorException(clazz, params);
}
// SpringLoaded changes modifiers, so must fix them
fixModifier(rtype, c);
return c;
}
else {
// Reloaded type
TypeDescriptor desc = rtype.getLatestTypeDescriptor();
MethodMember[] members = desc.getConstructors();
String searchFor = Utils.toConstructorDescriptor(params);
for (MethodMember m : members) {
if (m.getDescriptor().equals(searchFor)) {
return newConstructor(rtype, m);
}
}
throw Exceptions.noSuchConstructorException(clazz, params);
}
}
}
public static Constructor<?> jlClassGetConstructor(Class<?> clazz, Class<?>... params) throws SecurityException,
NoSuchMethodException {
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
return clazz.getConstructor(params);
}
else {
Constructor<?> c = jlClassGetDeclaredConstructor(clazz, params);
if (Modifier.isPublic(c.getModifiers())) {
return c;
}
else {
throw Exceptions.noSuchMethodException(clazz, "<init>", params);
}
}
}
private static boolean isInheritable(Annotation annotation) {
return annotation.annotationType().isAnnotationPresent(Inherited.class);
}
/**
	 * Performs access checks and returns a (potential) copy of the method with the accessibility flag set if this is necessary
* for the invoke to succeed.
* <p>
* Also checks for deleted methods.
* <p>
* If any checks fail, an appropriate exception is raised.
*/
private static Method asAccessibleMethod(ReloadableType methodDeclaringTypeReloadableType, Method method,
Object target,
boolean makeAccessibleCopy) throws IllegalAccessException {
if (methodDeclaringTypeReloadableType != null && isDeleted(methodDeclaringTypeReloadableType, method)) {
throw Exceptions.noSuchMethodError(method);
}
if (method.isAccessible()) {
//More expensive check not required / copy not required
}
else {
Class<?> clazz = method.getDeclaringClass();
int mods = method.getModifiers();
int classmods;
// ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(clazz);
if (methodDeclaringTypeReloadableType == null || !methodDeclaringTypeReloadableType.hasBeenReloaded()) {
classmods = clazz.getModifiers();
}
else {
//Note: the "super bit" may be set in class modifiers but we should block it out, it
//shouldn't be shown to users of the reflection API.
classmods = methodDeclaringTypeReloadableType.getLatestTypeDescriptor().getModifiers()
& ~Opcodes.ACC_SUPER;
}
if (Modifier.isPublic(mods & classmods/*jlClassGetModifiers(clazz)*/)) {
//More expensive check not required / copy not required
}
else {
//More expensive check required
Class<?> callerClass = getCallerClass();
JVM.ensureMemberAccess(callerClass, clazz, target, mods);
if (makeAccessibleCopy) {
method = JVM.copyMethod(method); // copy: we must not change accessible flag on original method!
method.setAccessible(true);
}
}
}
return makeAccessibleCopy ? method : null;
}
private static Constructor<?> asAccessibleConstructor(Constructor<?> c, boolean makeAccessibleCopy)
throws NoSuchMethodException, IllegalAccessException {
if (isDeleted(c)) {
throw Exceptions.noSuchConstructorError(c);
}
Class<?> clazz = c.getDeclaringClass();
int mods = c.getModifiers();
if (c.isAccessible() || Modifier.isPublic(mods & jlClassGetModifiers(clazz))) {
//More expensive check not required / copy not required
}
else {
//More expensive check required
Class<?> callerClass = getCallerClass();
JVM.ensureMemberAccess(callerClass, clazz, null, mods);
if (makeAccessibleCopy) {
c = JVM.copyConstructor(c); // copy: we must not change accessible flag on original method!
c.setAccessible(true);
}
}
return makeAccessibleCopy ? c : null;
}
/**
	 * Performs access checks and returns a (potential) copy of the field with the accessibility flag set if this is
	 * necessary for the access operation to succeed.
* <p>
* If any checks fail, an appropriate exception is raised.
*
	 * Warning: this method is sensitive to stack depth! It expects to be called DIRECTLY from a jlr redirection
	 * method only!
*/
private static Field asAccessibleField(Field field, Object target, boolean makeAccessibleCopy)
throws IllegalAccessException {
if (isDeleted(field)) {
throw Exceptions.noSuchFieldError(field);
}
Class<?> clazz = field.getDeclaringClass();
int mods = field.getModifiers();
if (field.isAccessible() || Modifier.isPublic(mods & jlClassGetModifiers(clazz))) {
//More expensive check not required / copy not required
}
else {
//More expensive check required
Class<?> callerClass = getCallerClass();
JVM.ensureMemberAccess(callerClass, clazz, target, mods);
if (makeAccessibleCopy) {
//TODO: This code is not covered by a test. It needs a non-reloadable type with non-public
				// field, being accessed reflectively from a context that is "privileged" to access it without setting the access flag.
field = JVM.copyField(field); // copy: we must not change accessible flag on original method!
field.setAccessible(true);
}
}
return makeAccessibleCopy ? field : null;
}
/**
* Performs all necessary checks that need to be done before a field set should be allowed.
*
* @throws IllegalAccessException
*/
private static Field asSetableField(Field field, Object target, Class<?> valueType, Object value,
boolean makeAccessibleCopy)
throws IllegalAccessException {
// Must do the checks exactly in the same order as JVM if we want identical error messages.
		// The JVM doesn't do this check at all (a deleted field cannot happen without reloading), so we do it first of all.
if (isDeleted(field)) {
throw Exceptions.noSuchFieldError(field);
}
Class<?> clazz = field.getDeclaringClass();
int mods = field.getModifiers();
if (field.isAccessible() || Modifier.isPublic(mods & jlClassGetModifiers(clazz))) {
//More expensive check not required / copy not required
}
else {
//More expensive check required
Class<?> callerClass = getCallerClass();
JVM.ensureMemberAccess(callerClass, clazz, target, mods);
if (makeAccessibleCopy) {
//TODO: This code is not covered by a test. It needs a non-reloadable type with non-public
				// field, being accessed reflectively from a context that is "privileged" to access it without setting the access flag.
field = JVM.copyField(field); // copy: we must not change accessible flag on original field!
field.setAccessible(true);
}
}
if (isPrimitive(valueType)) {
//It seems for primitive types, the order of the checks (in Sun JVM) is different!
typeCheckFieldSet(field, valueType, value);
if (!field.isAccessible() && Modifier.isFinal(mods)) {
throw Exceptions.illegalSetFinalFieldException(field, field.getType(), coerce(value, field.getType()));
}
}
else {
if (!field.isAccessible() && Modifier.isFinal(mods)) {
throw Exceptions.illegalSetFinalFieldException(field, valueType, value);
}
typeCheckFieldSet(field, valueType, value);
}
return makeAccessibleCopy ? field : null;
}
private static Object coerce(Object value, Class<?> toType) {
//Warning: this method's implementation is not for general use, it's only intended use is to
// ensure correctness of error messages, so it doesn't need to cover all 'coercable' cases,
// only those cases where the coerced value print out differently, and which are reachable
// from 'asSetableField'.
Class<? extends Object> fromType = value.getClass();
if (Integer.class.equals(fromType)) {
if (float.class.equals(toType)) {
return (float) (Integer) value;
}
else if (double.class.equals(toType)) {
return (double) (Integer) value;
}
}
else if (Byte.class.equals(fromType)) {
if (float.class.equals(toType)) {
return (float) (Byte) value;
}
else if (double.class.equals(toType)) {
return (double) (Byte) value;
}
}
else if (Character.class.equals(fromType)) {
if (int.class.equals(toType)) {
return (int) (Character) value;
}
else if (long.class.equals(toType)) {
return (long) (Character) value;
}
else if (float.class.equals(toType)) {
return (float) (Character) value;
}
else if (double.class.equals(toType)) {
return (double) (Character) value;
}
}
else if (Short.class.equals(fromType)) {
if (float.class.equals(toType)) {
return (float) (Short) value;
}
else if (double.class.equals(toType)) {
return (double) (Short) value;
}
}
else if (Long.class.equals(fromType)) {
if (float.class.equals(toType)) {
return (float) (Long) value;
}
else if (double.class.equals(toType)) {
return (double) (Long) value;
}
}
else if (Float.class.equals(fromType)) {
if (double.class.equals(toType)) {
return (double) (Float) value;
}
}
return value;
}
/**
* Perform a dynamic type check needed when setting a field value onto a field. Raises the appropriate exception
* when the check fails and returns normally otherwise. This method should only be called for object types. For
* primitive types call the three parameter variant instead.
*
* @throws IllegalAccessException
*/
private static void typeCheckFieldSet(Field field, Object value) throws IllegalAccessException {
Class<?> fieldType = field.getType();
if (value == null) {
if (fieldType.isPrimitive()) {
throw Exceptions.illegalSetFieldTypeException(field, null, value);
}
}
else {
if (fieldType.isPrimitive()) {
fieldType = boxTypeFor(fieldType);
}
Class<?> valueType = value.getClass();
if (!Utils.isConvertableFrom(fieldType, valueType)) {
throw Exceptions.illegalSetFieldTypeException(field, valueType, value);
}
}
}
/**
* Perform a dynamic type check needed when setting a field value onto a field. Raises the appropriate exception
* when the check fails and returns normally otherwise.
*
* @throws IllegalAccessException
*/
private static void typeCheckFieldSet(Field field, Class<?> valueType, Object value) throws IllegalAccessException {
if (!isPrimitive(valueType)) {
//Call the version of this method that considers autoboxing
typeCheckFieldSet(field, value);
}
else {
//Value type is primitive.
// Note: In this case value was a primitive value that became boxed, so it can't be null.
Class<?> fieldType = field.getType();
if (!Utils.isConvertableFrom(fieldType, valueType)) {
throw Exceptions.illegalSetFieldTypeException(field, valueType, value);
}
}
}
/**
* Checks whether given 'valueType' is a primitive type, considering that we use 'null' as the type for 'null' (to
* distinguish it from the type 'Object' which is not the same!)
*/
private static boolean isPrimitive(Class<?> valueType) {
return valueType != null && valueType.isPrimitive();
}
/**
* Determine a "valueType" from a given value object. Note that this should really only be used for values that are
* non-primitive, otherwise it will be impossible to distinguish between a primitive value and its boxed
* representation.
* <p>
* In a context where you have a primitive value that gets boxed up, its valueType should be passed in explicitly as
* a class like, for example, int.class.
*/
private static Class<?> valueType(Object value) {
if (value == null) {
return null;
}
else {
return value.getClass();
}
}
/**
* Retrieve modifiers for a Java class, which might or might not be reloadable or reloaded.
*
* @param clazz the class for which to discover modifiers
* @return the modifiers
*/
public static int jlClassGetModifiers(Class<?> clazz) {
// ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(clazz);
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
return clazz.getModifiers();
}
else {
//Note: the "super bit" may be set in class modifiers but we should block it out, it
//shouldn't be shown to users of the reflection API.
return rtype.getLatestTypeDescriptor().getModifiers() & ~Opcodes.ACC_SUPER;
}
}
private static boolean isDeleted(ReloadableType rtype, Method method) {
// ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(method.getDeclaringClass());
if (rtype == null || !rtype.hasBeenReloaded()) {
return false;
}
else {
MethodMember currentMethod = rtype.getCurrentMethod(method.getName(), Type.getMethodDescriptor(method));
if (currentMethod == null) {
return true; // Method not there, consider it deleted
}
else {
				return MethodMember.isDeleted(currentMethod); // Deleted bit is set, consider it deleted
}
}
}
private static boolean isDeleted(Constructor<?> c) {
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(c.getDeclaringClass());
if (rtype == null) {
return false;
}
else {
TypeDescriptor desc = rtype.getLatestTypeDescriptor();
MethodMember currentConstructor = desc.getConstructor(Type.getConstructorDescriptor(c));
if (currentConstructor == null) {
//TODO: test case with a deleted constructor
				return true; // Constructor not there, consider it deleted
}
else {
return false;
}
}
}
private static boolean isDeleted(Field field) {
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(field.getDeclaringClass());
if (rtype == null) {
return false;
}
else {
TypeDescriptor desc = rtype.getLatestTypeDescriptor();
FieldMember currentField = desc.getField(field.getName());
if (currentField == null) {
				return true; // Field not there, consider it deleted
}
else {
return false;
}
// Fields don't have deleted bits now, but maybe they get them in the future?
// } else {
// return FieldMember.isDeleted(currentField); // Deleted bit is set consider deleted
// }
}
}
/**
* If clazz is reloadable <b>and</b> has been reloaded at least once then return the ReloadableType instance for it,
* otherwise return null.
*
* @param clazz the type which may or may not be reloadable
* @return the reloadable type or null
*/
private static ReloadableType getReloadableTypeIfHasBeenReloaded(Class<?> clazz) {
if (TypeRegistry.nothingReloaded) {
return null;
}
ReloadableType rtype = getRType(clazz);
if (rtype != null && rtype.hasBeenReloaded()) {
return rtype;
}
else {
return null;
}
}
	private static final boolean theOldWay = false;
/**
* Access and return the ReloadableType field on a specified class.
*
* @param clazz the class for which to discover the reloadable type
* @return the reloadable type for the class, or null if not reloadable
*/
public static ReloadableType getRType(Class<?> clazz) {
// ReloadableType rtype = null;
WeakReference<ReloadableType> ref = classToRType.get(clazz);
ReloadableType rtype = null;
if (ref != null) {
rtype = ref.get();
}
if (rtype == null) {
if (!theOldWay) {
// 'theOldWay' attempts to grab the field from the type via reflection. This usually works except
// in cases where the class is not resolved yet since it can cause the class to resolve and its
// static initializer to run. This was happening on a grails compile where the compiler is
// loading dependencies (but not initializing them). Instead we can use this route of
// discovering the type registry and locating the reloadable type. This does some map lookups
// which may be a problem, but once discovered, it is cached in the weak ref so that shouldn't
// be an ongoing perf problem.
// TODO testcases for something that is reloaded without having been resolved
ClassLoader cl = clazz.getClassLoader();
TypeRegistry tr = TypeRegistry.getTypeRegistryFor(cl);
if (tr == null) {
classToRType.put(clazz, ReloadableType.NOT_RELOADABLE_TYPE_REF);
}
else {
rtype = tr.getReloadableType(clazz.getName().replace('.', '/'));
if (rtype == null) {
classToRType.put(clazz, ReloadableType.NOT_RELOADABLE_TYPE_REF);
}
else {
classToRType.put(clazz, new WeakReference<ReloadableType>(rtype));
}
}
}
else {
// need to work it out
Field rtypeField;
try {
// System.out.println("discovering field for " + clazz.getName());
// TODO cache somewhere - will need a clazz>Field cache
rtypeField = clazz.getDeclaredField(Constants.fReloadableTypeFieldName);
}
catch (NoSuchFieldException nsfe) {
classToRType.put(clazz, ReloadableType.NOT_RELOADABLE_TYPE_REF);
// expensive if constantly discovering this
return null;
}
try {
rtypeField.setAccessible(true);
rtype = (ReloadableType) rtypeField.get(null);
if (rtype == null) {
classToRType.put(clazz, ReloadableType.NOT_RELOADABLE_TYPE_REF);
throw new ReloadException("ReloadableType field '" + Constants.fReloadableTypeFieldName
+ "' is 'null' on type " + clazz.getName());
}
else {
classToRType.put(clazz, new WeakReference<ReloadableType>(rtype));
}
}
catch (Exception e) {
throw new ReloadException("Unable to access ReloadableType field '"
+ Constants.fReloadableTypeFieldName
+ "' on type " + clazz.getName(), e);
}
}
}
else if (rtype == ReloadableType.NOT_RELOADABLE_TYPE) {
return null;
}
return rtype;
}
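	// Usage sketch (illustrative; 'someClass' is hypothetical):
	//   ReloadableType rt = getRType(someClass);
	//   if (rt != null) { /* type is reloadable and may have been reloaded */ }
	// Repeated calls for the same class are answered from the classToRType weak-reference cache,
	// including negative answers recorded via ReloadableType.NOT_RELOADABLE_TYPE_REF.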
public static Annotation[] jlrMethodGetDeclaredAnnotations(Method method) {
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(method.getDeclaringClass());
if (rtype == null) {
//Nothing special to be done
return method.getDeclaredAnnotations();
}
else {
// Method could have changed...
CurrentLiveVersion clv = rtype.getLiveVersion();
MethodMember methodMember = rtype.getCurrentMethod(method.getName(), Type.getMethodDescriptor(method));
if (MethodMember.isCatcher(methodMember)) {
if (clv.getExecutorMethod(methodMember) != null) {
throw new IllegalStateException();
}
return method.getDeclaredAnnotations();
}
Method executor = clv.getExecutorMethod(methodMember);
return executor.getAnnotations();
}
}
public static Annotation[][] jlrMethodGetParameterAnnotations(Method method) {
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(method.getDeclaringClass());
if (rtype == null) {
//Nothing special to be done
return method.getParameterAnnotations();
}
else {
// Method could have changed...
CurrentLiveVersion clv = rtype.getLiveVersion();
MethodMember currentMethod = rtype.getCurrentMethod(method.getName(), Type.getMethodDescriptor(method));
Method executor = clv.getExecutorMethod(currentMethod);
Annotation[][] result = executor.getParameterAnnotations();
if (!currentMethod.isStatic()) {
				//Non-static methods have an extra param on the executor.
				//Although the extra param is added to the front, the annotations aren't shifted,
				//so we actually have to drop the *last* array element.
result = Utils.arrayCopyOf(result, result.length - 1);
}
return result;
}
}
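	// Illustrative example (the executor shape is a SpringLoaded implementation detail): for a reloaded
	// instance method 'void m(String s)', the static executor carries an extra leading parameter for
	// the instance, so executor.getParameterAnnotations() yields one more entry than the original
	// method; dropping the last entry restores the single-parameter view callers expect.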
public static Object jlClassNewInstance(Class<?> clazz) throws SecurityException, NoSuchMethodException,
IllegalArgumentException, InstantiationException, IllegalAccessException, InvocationTargetException {
// Note: no special case for non-reloadable types here, because access checks:
// access checks depend on stack depth and springloaded rewriting changes that even for non-reloadable types!
		// TODO: This implementation doesn't check access modifiers on the class, so it may allow
		// instantiations that wouldn't be allowed by the JVM (e.g. if the constructor is public but the class is private).
// TODO: what about trying to instantiate an abstract class? should produce an error, does it?
Constructor<?> c;
try {
c = jlClassGetDeclaredConstructor(clazz);
}
catch (NoSuchMethodException e) {
// e.printStackTrace();
throw Exceptions.instantiation(clazz);
}
c = asAccessibleConstructor(c, true);
return jlrConstructorNewInstance(c);
}
public static Object jlrConstructorNewInstance(Constructor<?> c, Object... params) throws InstantiationException,
IllegalAccessException, IllegalArgumentException, InvocationTargetException, SecurityException,
NoSuchMethodException {
		//Note: unlike for methods we don't need to handle the 'reloadable but not yet reloaded' case specially; that is because
		// there is no inheritance on constructors, so reloaded superclasses cannot affect constructor lookup the way they affect method lookup.
Class<?> clazz = c.getDeclaringClass();
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(clazz);
if (rtype == null) {
c = asAccessibleConstructor(c, true);
//Nothing special to be done
return c.newInstance(params);
}
else {
// Constructor may have changed...
// this is the right thing to do but makes a mess of getDeclaredConstructors (and affects getDeclaredConstructor)
// // TODO should check about constructor changing
// rtype.getTypeDescriptor().getConstructor("").
boolean ctorChanged = rtype.getLiveVersion().hasConstructorChanged(
Utils.toConstructorDescriptor(c.getParameterTypes()));
if (!ctorChanged) {
				// If we let the getDeclaredConstructor(s) code run as-is, it may create invalid ctors; if we want to run the real one we should discover it here and use it.
				// Would it be cheaper to fix up getDeclaredConstructor to always return valid ones if we are going to use them, or should we intercept here? Probably the former...
c = asAccessibleConstructor(c, true);
return c.newInstance(params);
}
asAccessibleConstructor(c, false);
CurrentLiveVersion clv = rtype.getLiveVersion();
Method executor = clv.getExecutorMethod(rtype.getCurrentConstructor(Type.getConstructorDescriptor(c)));
Constructor<?> magicConstructor = clazz.getConstructor(C.class);
Object instance = magicConstructor.newInstance((Object) null);
Object[] instanceAndParams;
if (params == null || params.length == 0) {
instanceAndParams = new Object[] { instance };
}
else {
//Must add instance as first param: executor is a static method.
instanceAndParams = new Object[params.length + 1];
instanceAndParams[0] = instance;
System.arraycopy(params, 0, instanceAndParams, 1, params.length);
}
executor.invoke(null, instanceAndParams);
return instance;
}
}
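	// Illustrative sketch (names hypothetical): invoking a reloaded 'new Foo("x")' ends up as
	//   executor.invoke(null, new Object[] { instance, "x" })
	// where 'instance' was produced by the magic single-argument constructor Foo(C), mirroring the
	// instanceAndParams array built above for the static constructor executor.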
// private static String toString(Object... params) {
// if (params == null) {
// return "null";
// }
// StringBuilder s = new StringBuilder();
// for (Object param : params) {
// s.append(param).append(" ");
// }
// return "[" + s.toString().trim() + "]";
// }
@SuppressWarnings({ "rawtypes", "unchecked" })
public static Object jlrMethodInvoke(Method method, Object target, Object... params)
throws IllegalArgumentException,
IllegalAccessException, InvocationTargetException {
// System.out.println("> jlrMethodInvoke:method=" + method + " target=" + target + " params=" + toString(params));
Class declaringClass = method.getDeclaringClass();
if (declaringClass == Class.class) {
String mname = method.getName();
try {
if (mname.equals("getFields")) {
return jlClassGetFields((Class) target);
}
else if (mname.equals("getDeclaredFields")) {
return jlClassGetDeclaredFields((Class) target);
}
else if (mname.equals("getDeclaredField")) {
return jlClassGetDeclaredField((Class) target, (String) params[0]);
}
else if (mname.equals("getField")) {
return jlClassGetField((Class) target, (String) params[0]);
}
else if (mname.equals("getConstructors")) {
return jlClassGetConstructors((Class) target);
}
else if (mname.equals("getDeclaredConstructors")) {
return jlClassGetDeclaredConstructors((Class) target);
}
else if (mname.equals("getDeclaredMethod")) {
return jlClassGetDeclaredMethod((Class) target, (String) params[0], (Class[]) params[1]);
}
else if (mname.equals("getDeclaredMethods")) {
return jlClassGetDeclaredMethods((Class) target);
}
else if (mname.equals("getMethod")) {
return jlClassGetMethod((Class) target, (String) params[0], (Class[]) params[1]);
}
else if (mname.equals("getMethods")) {
return jlClassGetMethods((Class) target);
}
else if (mname.equals("getConstructor")) {
return jlClassGetConstructor((Class) target, (Class[]) params[0]);
}
else if (mname.equals("getDeclaredConstructor")) {
return jlClassGetDeclaredConstructor((Class) target, (Class[]) params[0]);
}
else if (mname.equals("getModifiers")) {
return jlClassGetModifiers((Class) target);
}
else if (mname.equals("isAnnotationPresent")) {
return jlClassIsAnnotationPresent((Class) target, (Class<? extends Annotation>) params[0]);
}
else if (mname.equals("newInstance")) {
return jlClassNewInstance((Class) target);
}
else if (mname.equals("getDeclaredAnnotations")) {
return jlClassGetDeclaredAnnotations((Class) target);
}
else if (mname.equals("getAnnotation")) {
return jlClassGetAnnotation((Class) target, (Class) params[0]);
}
else if (mname.equals("getAnnotations")) {
return jlClassGetAnnotations((Class) target);
}
}
catch (NoSuchMethodException nsme) {
throw new InvocationTargetException(nsme);
}
catch (NoSuchFieldException nsfe) {
throw new InvocationTargetException(nsfe);
}
catch (InstantiationException ie) {
throw new InvocationTargetException(ie);
}
}
else if (declaringClass == Method.class) {
String mname = method.getName();
if (mname.equals("invoke")) {
return jlrMethodInvoke((Method) target, params[0], (Object[]) params[1]);
}
else if (mname.equals("getAnnotation")) {
return jlrMethodGetAnnotation((Method) target, (Class) params[0]);
}
else if (mname.equals("getAnnotations")) {
return jlrMethodGetAnnotations((Method) target);
}
else if (mname.equals("getDeclaredAnnotations")) {
return jlrMethodGetDeclaredAnnotations((Method) target);
}
else if (mname.equals("getParameterAnnotations")) {
return jlrMethodGetParameterAnnotations((Method) target);
}
else if (mname.equals("isAnnotationPresent")) {
return jlrMethodIsAnnotationPresent((Method) target, (Class) params[0]);
}
}
else if (declaringClass == Constructor.class) {
String mname = method.getName();
try {
if (mname.equals("getAnnotation")) {
return jlrConstructorGetAnnotation((Constructor) target, (Class) params[0]);
}
else if (mname.equals("newInstance")) {
return jlrConstructorNewInstance((Constructor) target, (Object[]) params[0]);
}
else if (mname.equals("getAnnotations")) {
return jlrConstructorGetAnnotations((Constructor) target);
}
else if (mname.equals("getDeclaredAnnotations")) {
return jlrConstructorGetDeclaredAnnotations((Constructor) target);
}
else if (mname.equals("isAnnotationPresent")) {
return jlrConstructorIsAnnotationPresent((Constructor) target, (Class) params[0]);
}
else if (mname.equals("getParameterAnnotations")) {
return jlrConstructorGetParameterAnnotations((Constructor) target);
}
}
catch (InstantiationException ie) {
throw new InvocationTargetException(ie);
}
catch (NoSuchMethodException nsme) {
throw new InvocationTargetException(nsme);
}
}
else if (declaringClass == Field.class) {
String mname = method.getName();
if (mname.equals("set")) {
jlrFieldSet((Field) target, params[0], params[1]);
return null;
}
else if (mname.equals("setBoolean")) {
jlrFieldSetBoolean((Field) target, params[0], (Boolean) params[1]);
return null;
}
else if (mname.equals("setByte")) {
jlrFieldSetByte((Field) target, params[0], (Byte) params[1]);
return null;
}
else if (mname.equals("setChar")) {
jlrFieldSetChar((Field) target, params[0], (Character) params[1]);
return null;
}
else if (mname.equals("setFloat")) {
jlrFieldSetFloat((Field) target, params[0], (Float) params[1]);
return null;
}
else if (mname.equals("setShort")) {
jlrFieldSetShort((Field) target, params[0], (Short) params[1]);
return null;
}
else if (mname.equals("setLong")) {
jlrFieldSetLong((Field) target, params[0], (Long) params[1]);
return null;
}
else if (mname.equals("setDouble")) {
jlrFieldSetDouble((Field) target, params[0], (Double) params[1]);
return null;
}
else if (mname.equals("setInt")) {
jlrFieldSetInt((Field) target, params[0], (Integer) params[1]);
return null;
}
else if (mname.equals("get")) {
return jlrFieldGet((Field) target, params[0]);
}
else if (mname.equals("getByte")) {
return jlrFieldGetByte((Field) target, params[0]);
}
else if (mname.equals("getChar")) {
return jlrFieldGetChar((Field) target, params[0]);
}
else if (mname.equals("getDouble")) {
return jlrFieldGetDouble((Field) target, params[0]);
}
else if (mname.equals("getBoolean")) {
return jlrFieldGetBoolean((Field) target, params[0]);
}
else if (mname.equals("getLong")) {
return jlrFieldGetLong((Field) target, params[0]);
}
else if (mname.equals("getFloat")) {
return jlrFieldGetFloat((Field) target, params[0]);
}
else if (mname.equals("getInt")) {
return jlrFieldGetInt((Field) target, params[0]);
}
else if (mname.equals("getShort")) {
return jlrFieldGetShort((Field) target, params[0]);
}
else if (mname.equals("getAnnotations")) {
return jlrFieldGetAnnotations((Field) target);
}
else if (mname.equals("getDeclaredAnnotations")) {
return jlrFieldGetDeclaredAnnotations((Field) target);
}
else if (mname.equals("isAnnotationPresent")) {
return jlrFieldIsAnnotationPresent((Field) target, (Class) params[0]);
}
else if (mname.equals("getAnnotation")) {
return jlrFieldGetAnnotation((Field) target, (Class) params[0]);
}
}
else if (declaringClass == AccessibleObject.class) {
String mname = method.getName();
if (mname.equals("isAnnotationPresent")) {
if (target instanceof Constructor) {
// TODO what about null target - how should things go bang?
return jlrConstructorIsAnnotationPresent((Constructor) target, (Class) params[0]);
}
else if (target instanceof Method) {
return jlrMethodIsAnnotationPresent((Method) target, (Class) params[0]);
}
else if (target instanceof Field) {
return jlrFieldIsAnnotationPresent((Field) target, (Class) params[0]);
}
}
else if (mname.equals("getAnnotations")) {
if (target instanceof Constructor) {
return jlrConstructorGetAnnotations((Constructor) target);
}
else if (target instanceof Method) {
return jlrMethodGetAnnotations((Method) target);
}
else if (target instanceof Field) {
return jlrFieldGetAnnotations((Field) target);
}
}
else if (mname.equals("getDeclaredAnnotations")) {
if (target instanceof Constructor) {
return jlrConstructorGetDeclaredAnnotations((Constructor) target);
}
else if (target instanceof Method) {
return jlrMethodGetDeclaredAnnotations((Method) target);
}
else if (target instanceof Field) {
return jlrFieldGetDeclaredAnnotations((Field) target);
}
}
else if (mname.equals("getAnnotation")) {
if (target instanceof Constructor) {
return jlrConstructorGetAnnotation((Constructor) target, (Class) params[0]);
}
else if (target instanceof Method) {
return jlrMethodGetAnnotation((Method) target, (Class) params[0]);
}
else if (target instanceof Field) {
return jlrFieldGetAnnotation((Field) target, (Class) params[0]);
}
}
}
else if (declaringClass == AnnotatedElement.class) {
String mname = method.getName();
if (mname.equals("isAnnotationPresent")) {
if (target instanceof Constructor) {
// TODO what about null target - how should things go bang?
return jlrConstructorIsAnnotationPresent((Constructor) target, (Class) params[0]);
}
else if (target instanceof Method) {
return jlrMethodIsAnnotationPresent((Method) target, (Class) params[0]);
}
else if (target instanceof Field) {
return jlrFieldIsAnnotationPresent((Field) target, (Class) params[0]);
}
}
else if (mname.equals("getAnnotations")) {
if (target instanceof Constructor) {
return jlrConstructorGetAnnotations((Constructor) target);
}
else if (target instanceof Method) {
return jlrMethodGetAnnotations((Method) target);
}
else if (target instanceof Field) {
return jlrFieldGetAnnotations((Field) target);
}
}
else if (mname.equals("getDeclaredAnnotations")) {
if (target instanceof Constructor) {
return jlrConstructorGetDeclaredAnnotations((Constructor) target);
}
else if (target instanceof Method) {
return jlrMethodGetDeclaredAnnotations((Method) target);
}
else if (target instanceof Field) {
return jlrFieldGetDeclaredAnnotations((Field) target);
}
}
else if (mname.equals("getAnnotation")) {
if (target instanceof Constructor) {
return jlrConstructorGetAnnotation((Constructor) target, (Class) params[0]);
}
else if (target instanceof Method) {
return jlrMethodGetAnnotation((Method) target, (Class) params[0]);
}
else if (target instanceof Field) {
return jlrFieldGetAnnotation((Field) target, (Class) params[0]);
}
}
}
// Even though we tinker with the visibility of methods, we don't damage private ones (which would really cause chaos if we tried
// to allow the JVM to do the dispatch). That means this should be OK:
if (TypeRegistry.nothingReloaded) {
method = asAccessibleMethod(null, method, target, true);
return method.invoke(target, params);
}
ReloadableType declaringType = getRType(declaringClass);
if (declaringType == null) {
//Not reloadable...
method = asAccessibleMethod(declaringType, method, target, true);
return method.invoke(target, params);
}
else {
//Reloadable...
asAccessibleMethod(declaringType, method, target, false);
int mods = method.getModifiers();
Invoker invoker;
if ((mods & (Modifier.STATIC | Modifier.PRIVATE)) != 0) {
//These methods are dispatched statically
MethodProvider methods = MethodProvider.create(declaringType);
invoker = methods.staticLookup(mods, method.getName(), Type.getMethodDescriptor(method));
}
else {
//These methods are dispatched dynamically
				ReloadableType targetType = getRType(target.getClass()); //NPE possible, but that is the expected behaviour for a null target here!
if (targetType == null) {
if (GlobalConfiguration.verboseMode) {
System.out.println("UNEXPECTED: Subtype '"
+ target.getClass().getName()
+ "' of reloadable type "
+ method.getDeclaringClass().getName()
+ " is not reloadable: may not see changes reloaded in this hierarchy");
}
method = asAccessibleMethod(declaringType, method, target, true);
return method.invoke(target, params);
}
				MethodProvider methods = MethodProvider.create(targetType); //use the target type, not the declaring type, for dynamic lookup
invoker = methods.dynamicLookup(mods, method.getName(), Type.getMethodDescriptor(method));
}
return invoker.invoke(target, params);
}
}
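	// Dispatch summary (descriptive): reflective calls made on Class/Method/Constructor/Field/
	// AccessibleObject/AnnotatedElement are re-routed to the corresponding jl*/jlr* interceptors above;
	// for everything else, static and private methods are looked up against the declaring type, while
	// virtual methods are looked up against the runtime type of 'target' so reloaded overrides are honoured.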
public static boolean jlrMethodIsAnnotationPresent(Method method, Class<? extends Annotation> annotClass) {
return jlrMethodGetAnnotation(method, annotClass) != null;
}
public static Annotation jlrMethodGetAnnotation(Method method, Class<? extends Annotation> annotClass) {
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(method.getDeclaringClass());
if (rtype == null) {
//Nothing special to be done
return method.getAnnotation(annotClass);
}
else {
if (annotClass == null) {
throw new NullPointerException();
}
// Method could have changed...
Annotation[] annots = jlrMethodGetDeclaredAnnotations(method);
for (Annotation annotation : annots) {
if (annotClass.equals(annotation.annotationType())) {
return annotation;
}
}
return null;
}
}
public static Annotation[] jlrAnnotatedElementGetAnnotations(AnnotatedElement elem) {
if (elem instanceof Class<?>) {
return jlClassGetAnnotations((Class<?>) elem);
}
else if (elem instanceof AccessibleObject) {
return jlrAccessibleObjectGetAnnotations((AccessibleObject) elem);
}
else {
//Don't know what it is... not something we handle anyway
return elem.getAnnotations();
}
}
public static Annotation[] jlrAnnotatedElementGetDeclaredAnnotations(AnnotatedElement elem) {
if (elem instanceof Class<?>) {
return jlClassGetDeclaredAnnotations((Class<?>) elem);
}
else if (elem instanceof AccessibleObject) {
return jlrAccessibleObjectGetDeclaredAnnotations((AccessibleObject) elem);
}
else {
//Don't know what it is... not something we handle anyway
return elem.getDeclaredAnnotations();
}
}
public static Annotation[] jlrAccessibleObjectGetDeclaredAnnotations(AccessibleObject obj) {
if (obj instanceof Method) {
return jlrMethodGetDeclaredAnnotations((Method) obj);
}
else if (obj instanceof Field) {
return jlrFieldGetDeclaredAnnotations((Field) obj);
}
else if (obj instanceof Constructor<?>) {
return jlrConstructorGetDeclaredAnnotations((Constructor<?>) obj);
}
else {
//Some other type of member which we don't support reloading...
return obj.getDeclaredAnnotations();
}
}
public static Annotation[] jlrFieldGetDeclaredAnnotations(Field field) {
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(field.getDeclaringClass());
if (rtype == null) {
//Nothing special to be done
return field.getDeclaredAnnotations();
}
else {
// Field could have changed...
CurrentLiveVersion clv = rtype.getLiveVersion();
Field executor;
try {
executor = clv.getExecutorField(field.getName());
return executor.getAnnotations();
}
catch (Exception e) {
throw new IllegalStateException(e);
}
}
}
public static boolean jlrFieldIsAnnotationPresent(Field field, Class<? extends Annotation> annotType) {
if (annotType == null) {
throw new NullPointerException();
}
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(field.getDeclaringClass());
if (rtype == null) {
//Nothing special to be done
return field.isAnnotationPresent(annotType);
}
else {
// Field could have changed...
CurrentLiveVersion clv = rtype.getLiveVersion();
try {
Field executor = clv.getExecutorField(field.getName());
return executor.isAnnotationPresent(annotType);
}
catch (Exception e) {
throw new IllegalStateException(e);
}
}
}
public static Annotation[] jlrFieldGetAnnotations(Field field) {
//Fields do not inherit annotations so we can just call...
return jlrFieldGetDeclaredAnnotations(field);
}
public static Annotation[] jlrAccessibleObjectGetAnnotations(AccessibleObject obj) {
if (obj instanceof Method) {
return jlrMethodGetAnnotations((Method) obj);
}
else if (obj instanceof Field) {
return jlrFieldGetAnnotations((Field) obj);
}
else if (obj instanceof Constructor<?>) {
return jlrConstructorGetAnnotations((Constructor<?>) obj);
}
else {
//Some other type of member which we don't support reloading...
// (actually there are really no other cases any more!)
return obj.getAnnotations();
}
}
public static Annotation[] jlrConstructorGetAnnotations(Constructor<?> c) {
return jlrConstructorGetDeclaredAnnotations(c);
}
public static Annotation[] jlrConstructorGetDeclaredAnnotations(Constructor<?> c) {
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(c.getDeclaringClass());
if (rtype == null) {
//Nothing special to be done
return c.getDeclaredAnnotations();
}
else {
// Constructor could have changed...
CurrentLiveVersion clv = rtype.getLiveVersion();
Method executor = clv.getExecutorMethod(rtype.getCurrentConstructor(Type.getConstructorDescriptor(c)));
return executor.getAnnotations();
}
}
public static Annotation jlrConstructorGetAnnotation(Constructor<?> c, Class<? extends Annotation> annotType) {
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(c.getDeclaringClass());
if (rtype == null) {
//Nothing special to be done
return c.getAnnotation(annotType);
}
else {
// Constructor could have changed...
CurrentLiveVersion clv = rtype.getLiveVersion();
Method executor = clv.getExecutorMethod(rtype.getCurrentConstructor(Type.getConstructorDescriptor(c)));
return executor.getAnnotation(annotType);
}
}
public static Annotation[][] jlrConstructorGetParameterAnnotations(Constructor<?> c) {
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(c.getDeclaringClass());
if (rtype == null) {
//Nothing special to be done
return c.getParameterAnnotations();
}
else {
			// Constructor could have changed...
CurrentLiveVersion clv = rtype.getLiveVersion();
MethodMember currentConstructor = rtype.getCurrentConstructor(Type.getConstructorDescriptor(c));
Method executor = clv.getExecutorMethod(currentConstructor);
Annotation[][] result = executor.getParameterAnnotations();
			//Constructor executor methods have an extra param.
			//Although the extra param is added to the front, the annotations aren't shifted, so we actually have to drop
			//the *last* array element.
result = Utils.arrayCopyOf(result, result.length - 1);
return result;
}
}
public static boolean jlrConstructorIsAnnotationPresent(Constructor<?> c, Class<? extends Annotation> annotType) {
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(c.getDeclaringClass());
if (rtype == null) {
//Nothing special to be done
return c.isAnnotationPresent(annotType);
}
else {
// Constructor could have changed...
CurrentLiveVersion clv = rtype.getLiveVersion();
Method executor = clv.getExecutorMethod(rtype.getCurrentConstructor(Type.getConstructorDescriptor(c)));
return executor.isAnnotationPresent(annotType);
}
}
public static Annotation jlrFieldGetAnnotation(Field field, Class<? extends Annotation> annotType) {
if (annotType == null) {
throw new NullPointerException();
}
ReloadableType rtype = getReloadableTypeIfHasBeenReloaded(field.getDeclaringClass());
if (rtype == null) {
//Nothing special to be done
return field.getAnnotation(annotType);
}
else {
// Field could have changed...
CurrentLiveVersion clv = rtype.getLiveVersion();
try {
Field executor = clv.getExecutorField(field.getName());
return executor.getAnnotation(annotType);
}
catch (Exception e) {
throw new IllegalStateException(e);
}
}
}
public static Annotation[] jlrMethodGetAnnotations(Method method) {
return jlrMethodGetDeclaredAnnotations(method);
}
public static boolean jlrAnnotatedElementIsAnnotationPresent(AnnotatedElement elem,
Class<? extends Annotation> annotType) {
if (elem instanceof Class<?>) {
return jlClassIsAnnotationPresent((Class<?>) elem, annotType);
}
else if (elem instanceof AccessibleObject) {
return jlrAccessibleObjectIsAnnotationPresent((AccessibleObject) elem, annotType);
}
else {
//Don't know what it is... not something we handle anyway
return elem.isAnnotationPresent(annotType);
}
}
public static boolean jlrAccessibleObjectIsAnnotationPresent(AccessibleObject obj,
Class<? extends Annotation> annotType) {
if (obj instanceof Method) {
return jlrMethodIsAnnotationPresent((Method) obj, annotType);
}
else if (obj instanceof Field) {
return jlrFieldIsAnnotationPresent((Field) obj, annotType);
}
else if (obj instanceof Constructor) {
return jlrConstructorIsAnnotationPresent((Constructor<?>) obj, annotType);
}
else {
//Some other type of member which we don't support reloading...
return obj.isAnnotationPresent(annotType);
}
}
public static Annotation jlrAnnotatedElementGetAnnotation(AnnotatedElement elem,
Class<? extends Annotation> annotType) {
if (elem instanceof Class<?>) {
return jlClassGetAnnotation((Class<?>) elem, annotType);
}
else if (elem instanceof AccessibleObject) {
return jlrAccessibleObjectGetAnnotation((AccessibleObject) elem, annotType);
}
else {
//Don't know what it is... not something we handle anyway
// Note: only thing it can be is probably java.lang.Package
return elem.getAnnotation(annotType);
}
}
public static Annotation jlrAccessibleObjectGetAnnotation(AccessibleObject obj,
Class<? extends Annotation> annotType) {
if (obj instanceof Method) {
return jlrMethodGetAnnotation((Method) obj, annotType);
}
else if (obj instanceof Field) {
return jlrFieldGetAnnotation((Field) obj, annotType);
}
else if (obj instanceof Constructor<?>) {
return jlrConstructorGetAnnotation((Constructor<?>) obj, annotType);
}
else {
//Some other type of member which we don't support reloading...
return obj.getAnnotation(annotType);
}
}
public static Field jlClassGetField(Class<?> clazz, String name) throws SecurityException, NoSuchFieldException {
ReloadableType rtype = getRType(clazz);
if (name.startsWith(Constants.PREFIX)) {
throw Exceptions.noSuchFieldException(name);
}
if (rtype == null) {
//Not reloadable
return clazz.getField(name);
}
else {
//Reloadable
Field f = GetFieldLookup.lookup(rtype, name);
if (f != null) {
return f;
}
throw Exceptions.noSuchFieldException(name);
}
}
public static Field jlClassGetDeclaredField(Class<?> clazz, String name) throws SecurityException,
NoSuchFieldException {
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
return clazz.getDeclaredField(name);
}
else if (name.startsWith(Constants.PREFIX)) {
throw Exceptions.noSuchFieldException(name);
}
else if (!rtype.hasBeenReloaded()) {
Field f = clazz.getDeclaredField(name);
fixModifier(rtype.getLatestTypeDescriptor(), f);
return f;
}
else {
Field f = GetDeclaredFieldLookup.lookup(rtype, name);
if (f == null) {
throw Exceptions.noSuchFieldException(name);
}
else {
return f;
}
}
}
public static Field[] jlClassGetDeclaredFields(Class<?> clazz) {
Field[] fields = clazz.getDeclaredFields();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
return fields;
}
else {
if (!rtype.hasBeenReloaded()) {
//Not reloaded yet...
fields = removeMetaFields(fields);
fixModifiers(rtype, fields);
return fields;
}
else {
// Was reloaded, it's up to us to create the field objects
TypeDescriptor typeDesc = rtype.getLatestTypeDescriptor();
FieldMember[] members = typeDesc.getFields();
fields = new Field[members.length];
int i = 0;
for (FieldMember f : members) {
String fieldTypeDescriptor = f.getDescriptor();
Class<?> type;
try {
type = Utils.toClass(Type.getType(fieldTypeDescriptor), rtype.typeRegistry.getClassLoader());
}
catch (ClassNotFoundException e) {
throw new IllegalStateException(e);
}
fields[i++] = JVM.newField(clazz, type, f.getModifiers(), f.getName(), f.getGenericSignature());
}
if (GlobalConfiguration.assertsMode) {
Utils.assertTrue(i == fields.length, "Bug: unexpected number of fields");
}
return fields;
}
}
}
/**
	 * Given a list of fields, filter out those fields that are created by SpringLoaded (leaving only the "genuine"
	 * fields).
*/
private static Field[] removeMetaFields(Field[] fields) {
Field[] realFields = new Field[fields.length - 1];
//We'll delete at least one, sometimes more than one field (because there's at least the r$type field).
int i = 0;
for (Field field : fields) {
if (!field.getName().startsWith(Constants.PREFIX)) {
realFields[i++] = field;
}
}
if (i < realFields.length) {
realFields = Utils.arrayCopyOf(realFields, i);
}
if (GlobalConfiguration.assertsMode) {
Utils.assertTrue(i == realFields.length, "Bug in removeMetaFields, created array of wrong length");
}
return realFields;
}
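	// Illustrative example: for a reloadable class declaring 'int counter', the raw getDeclaredFields()
	// result also contains the injected r$type field (and any other Constants.PREFIX-prefixed members);
	// this filter returns only 'counter'. The exact set of injected fields is an implementation detail.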
/**
* Although fields are not reloadable, we have to intercept this because otherwise we'll return the r$type field as
* a result here.
*
* @param clazz the class for which to retrieve the fields
* @return array of fields in the class
*/
public static Field[] jlClassGetFields(Class<?> clazz) {
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
return clazz.getFields();
}
else {
List<Field> allFields = new ArrayList<Field>();
gatherFields(clazz, allFields, new HashSet<Class<?>>());
return allFields.toArray(new Field[allFields.size()]);
}
}
/**
	 * Gather up all public fields of a class or interface, recursing into its superclass (for classes) and all of its
	 * superinterfaces.
*
* @param clazz the class for which to collect up fields
* @param collected a collector that has fields added to it as this method runs (recursively)
* @param visited a set recording which types have already been visited
*/
private static void gatherFields(Class<?> clazz, List<Field> collected, HashSet<Class<?>> visited) {
if (visited.contains(clazz)) {
return;
}
visited.add(clazz);
Field[] fields = jlClassGetDeclaredFields(clazz);
for (Field f : fields) {
if (Modifier.isPublic(f.getModifiers())) {
collected.add(f);
}
}
if (!clazz.isInterface()) {
Class<?> supr = clazz.getSuperclass();
if (supr != null) {
gatherFields(supr, collected, visited);
}
}
for (Class<?> itf : clazz.getInterfaces()) {
gatherFields(itf, collected, visited);
}
}
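	// Note (descriptive): this traversal approximates the lookup performed by java.lang.Class#getFields --
	// declared public fields, then superclass fields (for classes), then fields inherited from each
	// implemented interface -- while the 'visited' set prevents revisiting shared superinterfaces.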
public static Object jlrFieldGet(Field field, Object target) throws IllegalArgumentException,
IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
field = asAccessibleField(field, target, true);
return field.get(target);
}
else {
asAccessibleField(field, target, false);
return rtype.getField(target, field.getName(), Modifier.isStatic(field.getModifiers()));
}
}
public static int jlrFieldGetInt(Field field, Object target) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
field = asAccessibleField(field, target, true);
return field.getInt(target);
}
else {
asAccessibleField(field, target, false);
typeCheckFieldGet(field, int.class);
Object value = rtype.getField(target, field.getName(), Modifier.isStatic(field.getModifiers()));
if (value instanceof Character) {
return ((Character) value).charValue();
}
else {
return ((Number) value).intValue();
}
}
}
public static byte jlrFieldGetByte(Field field, Object target) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
field = asAccessibleField(field, target, true);
return field.getByte(target);
}
else {
asAccessibleField(field, target, false);
typeCheckFieldGet(field, byte.class);
Object value = rtype.getField(target, field.getName(), Modifier.isStatic(field.getModifiers()));
return ((Number) value).byteValue();
}
}
public static char jlrFieldGetChar(Field field, Object target) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
field = asAccessibleField(field, target, true);
return field.getChar(target);
}
else {
asAccessibleField(field, target, false);
typeCheckFieldGet(field, char.class);
Object value = rtype.getField(target, field.getName(), Modifier.isStatic(field.getModifiers()));
return ((Character) value).charValue();
}
}
public static short jlrFieldGetShort(Field field, Object target) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
field = asAccessibleField(field, target, true);
return field.getShort(target);
}
else {
asAccessibleField(field, target, false);
typeCheckFieldGet(field, short.class);
Object value = rtype.getField(target, field.getName(), Modifier.isStatic(field.getModifiers()));
if (value instanceof Character) {
return (short) ((Character) value).charValue();
}
else {
return ((Number) value).shortValue();
}
}
}
public static double jlrFieldGetDouble(Field field, Object target) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
field = asAccessibleField(field, target, true);
return field.getDouble(target);
}
else {
asAccessibleField(field, target, false);
typeCheckFieldGet(field, double.class);
Object value = rtype.getField(target, field.getName(), Modifier.isStatic(field.getModifiers()));
if (value instanceof Character) {
return ((Character) value).charValue();
}
else {
return ((Number) value).doubleValue();
}
}
}
public static float jlrFieldGetFloat(Field field, Object target) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
field = asAccessibleField(field, target, true);
return field.getFloat(target);
}
else {
asAccessibleField(field, target, false);
typeCheckFieldGet(field, float.class);
Object value = rtype.getField(target, field.getName(), Modifier.isStatic(field.getModifiers()));
if (value instanceof Character) {
return ((Character) value).charValue();
}
else {
return ((Number) value).floatValue();
}
}
}
public static boolean jlrFieldGetBoolean(Field field, Object target) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
field = asAccessibleField(field, target, true);
return field.getBoolean(target);
}
else {
asAccessibleField(field, target, false);
typeCheckFieldGet(field, boolean.class);
Object value = rtype.getField(target, field.getName(), Modifier.isStatic(field.getModifiers()));
return ((Boolean) value).booleanValue();
}
}
public static long jlrFieldGetLong(Field field, Object target) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
field = asAccessibleField(field, target, true);
return field.getLong(target);
}
else {
asAccessibleField(field, target, false);
typeCheckFieldGet(field, long.class);
Object value = rtype.getField(target, field.getName(), Modifier.isStatic(field.getModifiers()));
if (value instanceof Character) {
return ((Character) value).charValue();
}
else {
return ((Number) value).longValue();
}
}
}
private static void typeCheckFieldGet(Field field, Class<?> returnType) {
Class<?> fieldType = field.getType();
if (!Utils.isConvertableFrom(returnType, fieldType)) {
throw Exceptions.illegalGetFieldType(field, returnType);
}
}
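	// Illustrative behaviour: calling getInt on a field declared 'long' fails this check (long does not
	// widen to int) and raises the exception built by Exceptions.illegalGetFieldType, whereas getLong on
	// an 'int' field passes, matching java.lang.reflect.Field semantics for non-reloaded types.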
public static void jlrFieldSet(Field field, Object target, Object value) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable
field = asSetableField(field, target, valueType(value), value, true);
field.set(target, value);
}
else {
asSetableField(field, target, valueType(value), value, false);
rtype.setField(target, field.getName(), Modifier.isStatic(field.getModifiers()), value);
}
}
public static void jlrFieldSetInt(Field field, Object target, int value) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable
field = asSetableField(field, target, int.class, value, true);
field.setInt(target, value);
}
else {
asSetableField(field, target, int.class, value, false);
rtype.setField(target, field.getName(), Modifier.isStatic(field.getModifiers()), value);
}
}
public static void jlrFieldSetByte(Field field, Object target, byte value) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable
field = asSetableField(field, target, byte.class, value, true);
field.setByte(target, value);
}
else {
asSetableField(field, target, byte.class, value, false);
rtype.setField(target, field.getName(), Modifier.isStatic(field.getModifiers()), value);
}
}
public static void jlrFieldSetChar(Field field, Object target, char value) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable
field = asSetableField(field, target, char.class, value, true);
field.setChar(target, value);
}
else {
asSetableField(field, target, char.class, value, false);
rtype.setField(target, field.getName(), Modifier.isStatic(field.getModifiers()), value);
}
}
public static void jlrFieldSetShort(Field field, Object target, short value) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable
field = asSetableField(field, target, short.class, value, true);
field.setShort(target, value);
}
else {
asSetableField(field, target, short.class, value, false);
rtype.setField(target, field.getName(), Modifier.isStatic(field.getModifiers()), value);
}
}
public static void jlrFieldSetDouble(Field field, Object target, double value) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable
field = asSetableField(field, target, double.class, value, true);
field.setDouble(target, value);
}
else {
asSetableField(field, target, double.class, value, false);
rtype.setField(target, field.getName(), Modifier.isStatic(field.getModifiers()), value);
}
}
public static void jlrFieldSetFloat(Field field, Object target, float value) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable
field = asSetableField(field, target, float.class, value, true);
field.setFloat(target, value);
}
else {
asSetableField(field, target, float.class, value, false);
rtype.setField(target, field.getName(), Modifier.isStatic(field.getModifiers()), value);
}
}
public static void jlrFieldSetLong(Field field, Object target, long value) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable
field = asSetableField(field, target, long.class, value, true);
field.setLong(target, value);
}
else {
asSetableField(field, target, long.class, value, false);
rtype.setField(target, field.getName(), Modifier.isStatic(field.getModifiers()), value);
}
}
public static void jlrFieldSetBoolean(Field field, Object target, boolean value) throws IllegalAccessException {
Class<?> clazz = field.getDeclaringClass();
ReloadableType rtype = getRType(clazz);
if (rtype == null) {
// Not reloadable
field = asSetableField(field, target, boolean.class, value, true);
field.setBoolean(target, value);
}
else {
asSetableField(field, target, boolean.class, value, false);
rtype.setField(target, field.getName(), Modifier.isStatic(field.getModifiers()), value);
}
}
/**
	 * Returns the "boxed" (wrapper) version of a given primitive type.
*/
private static Class<?> boxTypeFor(Class<?> primType) {
if (primType == int.class) {
return Integer.class;
}
else if (primType == boolean.class) {
return Boolean.class;
}
else if (primType == byte.class) {
return Byte.class;
}
else if (primType == char.class) {
return Character.class;
}
else if (primType == double.class) {
return Double.class;
}
else if (primType == float.class) {
return Float.class;
}
else if (primType == long.class) {
return Long.class;
}
else if (primType == short.class) {
return Short.class;
}
throw new IllegalStateException("Forgotten a case in this method?");
}
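	// Example: boxTypeFor(int.class) == Integer.class. void.class is deliberately not handled and would
	// fall through to the IllegalStateException, which is acceptable because void can never be a field
	// type reaching typeCheckFieldSet.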
}
| 27,478 |
775 | <gh_stars>100-1000
package com.badoo.chateau.example.data.repos.session;
import com.badoo.chateau.example.Broadcaster;
import com.badoo.chateau.data.models.BaseUser;
import com.badoo.chateau.example.data.util.ParseHelper;
import com.badoo.unittest.ModelTestHelper;
import com.badoo.unittest.rx.BaseRxTestCase;
import com.parse.ParseUser;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.runners.MockitoJUnitRunner;
import rx.Observable;
import rx.observers.TestSubscriber;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class ParseSessionDataSourceTest extends BaseRxTestCase {
private static final String USER_ID = "userId";
private static final String USER_NAME = "username";
private static final String PASSWORD = "password";
private static final String DISPLAY_NAME = "displayName";
private ParseHelper mParseHelper;
private Broadcaster mBroadcaster;
private ParseSessionDataSource mTarget;
@Before
public void setup() {
mParseHelper = mock(ParseHelper.class);
mBroadcaster = mock(Broadcaster.class);
mTarget = new ParseSessionDataSource(mBroadcaster, mParseHelper);
}
@Test
public void signIn() {
// Given
ParseUser currentUser = ModelTestHelper.createParseUser(USER_ID);
when(mParseHelper.signIn(USER_NAME, PASSWORD)).thenReturn(Observable.just(currentUser));
// When
TestSubscriber<BaseUser> testSubscriber = executeTarget(mTarget.signIn(new SessionQuery.SignIn(USER_NAME, PASSWORD)));
// Then
assertThat(testSubscriber.getOnNextEvents().size(), is(1));
BaseUser user = testSubscriber.getOnNextEvents().get(0);
assertEquals(USER_ID, user.getUserId());
testSubscriber.assertCompleted();
verify(mBroadcaster).userSignedIn();
}
@Test
public void register() {
// Given
ParseUser currentUser = ModelTestHelper.createParseUser(USER_ID);
when(mParseHelper.signUp(eq(USER_NAME), eq(PASSWORD), any())).thenReturn(Observable.just(currentUser));
// When
TestSubscriber<BaseUser> testSubscriber = executeTarget(mTarget.register(new SessionQuery.Register(USER_NAME, DISPLAY_NAME, PASSWORD)));
// Then
assertThat(testSubscriber.getOnNextEvents().size(), is(1));
BaseUser user = testSubscriber.getOnNextEvents().get(0);
assertEquals(USER_ID, user.getUserId());
testSubscriber.assertCompleted();
verify(mBroadcaster).userSignedIn();
}
@Test
public void signOut() {
// Given
when(mParseHelper.signOut()).thenReturn(Observable.empty());
// When
TestSubscriber<Void> testSubscriber = executeTarget(mTarget.signOut());
// Then
testSubscriber.assertCompleted();
verify(mBroadcaster).userSignedOut();
}
} | 1,240 |
1,444 | package mage.cards.t;
import mage.MageInt;
import mage.abilities.common.EntersBattlefieldTriggeredAbility;
import mage.abilities.effects.common.GainLifeEffect;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.SubType;
import java.util.UUID;
/**
* @author TheElk801
*/
public final class TurntimberAscetic extends CardImpl {
public TurntimberAscetic(UUID ownerId, CardSetInfo setInfo) {
super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{4}{G}");
this.subtype.add(SubType.GIANT);
this.subtype.add(SubType.CLERIC);
this.power = new MageInt(5);
this.toughness = new MageInt(4);
// When Turntimber Ascetic enters the battlefield, you gain 3 life.
this.addAbility(new EntersBattlefieldTriggeredAbility(new GainLifeEffect(3)));
}
private TurntimberAscetic(final TurntimberAscetic card) {
super(card);
}
@Override
public TurntimberAscetic copy() {
return new TurntimberAscetic(this);
}
}
| 415 |