content
stringlengths 4
1.04M
| lang
stringclasses 358
values | score
int64 0
5
| repo_name
stringlengths 5
114
| repo_path
stringlengths 4
229
| repo_licenses
sequencelengths 1
8
|
---|---|---|---|---|---|
import React from "react"

// Shadowed bio template: renders the biography paragraph used by the
// development-runtime e2e theme. The data-testid anchors the e2e assertion.
const Bio = () => (
  <>
    <p data-testid="bio">A bio all about me.</p>
  </>
)

export default Bio
| JSX | 3 | pipaliyajaydip/gatsby | e2e-tests/themes/development-runtime/src/gatsby-theme-about/templates/bio.jsx | [
"MIT"
] |
- dashboard: web_analytics_overview
title: Web Analytics Overview
layout: newspaper
preferred_viewer: dashboards
query_timezone: user_timezone
embed_style:
background_color: "#e8f1fa"
show_title: true
title_color: "#131414"
show_filters_bar: true
tile_text_color: gray
tile_separator_color: rgba(0, 0, 0, 0.05)
tile_border_radius: 3
show_tile_shadow: true
text_tile_text_color: ''
elements:
- title: Total Visitors
name: Total Visitors
model: thelook
explore: events
type: single_value
fields: [events.unique_visitors, events.event_week]
filters:
events.event_date: 2 weeks ago for 2 weeks
sorts: [events.event_week desc]
limit: 500
column_limit: 50
dynamic_fields: [{table_calculation: change, label: Change, expression: "${events.unique_visitors}-offset(${events.unique_visitors},1)"}]
query_timezone: America/Los_Angeles
font_size: medium
value_format: ''
text_color: black
colors: ["#1f78b4", "#a6cee3", "#33a02c", "#b2df8a", "#e31a1c", "#fb9a99", "#ff7f00",
"#fdbf6f", "#6a3d9a", "#cab2d6", "#b15928", "#edbc0e"]
show_single_value_title: true
show_comparison: true
comparison_type: change
comparison_reverse_colors: false
show_comparison_label: true
comparison_label: Weekly Change
single_value_title: Visitors Past Week
note_state: collapsed
note_display: below
note_text: ''
listen:
Browser: events.browser
Traffic Source: users.traffic_source
row: 0
col: 0
width: 6
height: 3
- title: Total Converted Visitors
name: Total Converted Visitors
model: thelook
explore: order_items
type: single_value
fields: [users.count]
sorts: [users.count desc]
limit: 500
font_size: medium
text_color: black
listen:
Traffic Source: users.traffic_source
Date: order_items.created_date
row: 0
col: 11
width: 5
height: 3
- title: Total Profit
name: Total Profit
model: thelook
explore: order_items
type: single_value
fields: [order_items.total_sale_price]
filters: {}
sorts: [orders.total_profit_k desc, order_items.total_sale_price desc]
limit: 500
query_timezone: America/Los_Angeles
font_size: medium
value_format: "$#,###"
text_color: black
colors: ["#1f78b4", "#a6cee3", "#33a02c", "#b2df8a", "#e31a1c", "#fb9a99", "#ff7f00",
"#fdbf6f", "#6a3d9a", "#cab2d6", "#b15928", "#edbc0e"]
color_palette: Default
note_state: expanded
note_display: below
note_text: ''
listen:
Traffic Source: users.traffic_source
Date: order_items.created_date
row: 0
col: 6
width: 5
height: 3
- title: Visits by Browser
name: Visits by Browser
model: thelook
explore: events
type: looker_pie
fields: [events.browser, events.count]
filters: {}
sorts: [events.count desc]
limit: 50
column_limit: 50
query_timezone: America/Los_Angeles
value_labels: legend
label_type: labPer
colors: ["#635189", "#8D7FB9", "#EA8A2F", "#e9b404", "#49cec1", "#a2dcf3", "#1ea8df",
"#7F7977"]
series_colors:
Chrome: "#5245ed"
Safari: "#a2dcf3"
Firefox: "#776fdf"
IE: "#1ea8df"
Other: "#49cec1"
show_null_labels: false
show_view_names: true
listen:
Browser: events.browser
Traffic Source: users.traffic_source
Date: events.event_date
row: 10
col: 16
width: 8
height: 8
- title: How Long do Visitors Spend on Website?
name: How Long do Visitors Spend on Website?
model: thelook
explore: events
type: looker_bar
fields: [sessions.duration_seconds_tier, sessions.count]
filters: {}
sorts: [sessions.duration_seconds_tier]
limit: 500
color_application:
collection_id: b43731d5-dc87-4a8e-b807-635bef3948e7
palette_id: fb7bb53e-b77b-4ab6-8274-9d420d3d73f3
options:
steps: 5
x_axis_gridlines: false
y_axis_gridlines: true
show_view_names: true
show_y_axis_labels: true
show_y_axis_ticks: true
y_axis_labels: [Number of Sessions]
y_axis_tick_density: default
y_axis_tick_density_custom: 5
show_x_axis_label: false
x_axis_label: Session Duration in Seconds
show_x_axis_ticks: true
y_axis_scale_mode: linear
x_axis_reversed: false
y_axis_reversed: false
plot_size_by_field: false
trellis: ''
stacking: normal
limit_displayed_rows: false
legend_position: center
colors: ["#8D7FB9"]
point_style: none
series_colors:
sessions.count: "#5245ed"
show_value_labels: false
label_density: 25
x_axis_scale: auto
y_axis_combined: true
ordering: none
show_null_labels: false
show_totals_labels: false
show_silhouette: false
totals_color: "#808080"
x_axis_label_rotation: -45
show_dropoff: false
listen:
Browser: events.browser
Traffic Source: users.traffic_source
Date: events.event_date
row: 0
col: 16
width: 8
height: 10
- title: Bounce Rate by Page
name: Bounce Rate by Page
model: thelook
explore: sessions
type: looker_column
fields: [events.event_type, events.bounce_rate, sessions.count]
filters:
events.event_type: "-Purchase,-Login,-Register,-History,-Cancel,-Return"
sessions.session_start_date: 7 days
sorts: [sessions.count desc]
limit: 10
x_axis_gridlines: false
y_axis_gridlines: true
show_view_names: false
show_y_axis_labels: true
show_y_axis_ticks: true
y_axis_tick_density: default
y_axis_tick_density_custom: 5
show_x_axis_label: false
show_x_axis_ticks: true
y_axis_scale_mode: linear
x_axis_reversed: false
y_axis_reversed: false
plot_size_by_field: false
trellis: ''
stacking: ''
limit_displayed_rows: false
legend_position: center
colors: ["#a2dcf3", "#64518A", "#8D7FB9"]
series_types:
events.bounce_rate: line
point_style: circle_outline
series_colors:
sessions.count: "#1ea8df"
series_labels:
events.bounce_rate: Bounce Rate by Page
events.count: Number of Page Views
show_value_labels: false
label_density: 10
x_axis_scale: auto
y_axis_combined: false
y_axis_orientation: [left, right]
ordering: none
show_null_labels: false
show_totals_labels: false
show_silhouette: false
totals_color: "#808080"
show_null_points: true
interpolation: linear
listen:
Browser: events.browser
Traffic Source: users.traffic_source
Date: events.event_date
row: 18
col: 0
width: 12
height: 7
- title: App Overview
name: App Overview
model: thelook
explore: events
type: table
fields: [product_viewed.brand, events.count, events.unique_visitors, sessions.count_purchase,
sessions.cart_to_checkout_conversion]
filters:
product_viewed.brand: "-NULL"
sorts: [events.count desc]
limit: 10
query_timezone: America/Los_Angeles
show_view_names: false
show_row_numbers: true
show_value_labels: true
show_null_labels: false
stacking: ''
x_axis_gridlines: false
y_axis_gridlines: true
show_y_axis_labels: true
show_y_axis_ticks: true
y_axis_tick_density: default
y_axis_tick_density_custom: 5
show_x_axis_label: true
show_x_axis_ticks: true
x_axis_scale: auto
y_axis_combined: true
series_labels:
events.count: Total Pageviews
y_axis_labels: [Total Pageviews]
x_axis_label: Brand Name
label_density: 25
legend_position: center
ordering: none
colors: ["#64518A", "#8D7FB9"]
hide_legend: false
show_dropoff: false
truncate_column_names: false
table_theme: gray
limit_displayed_rows: false
listen:
Browser: events.browser
Traffic Source: users.traffic_source
Date: events.event_date
row: 18
col: 12
width: 12
height: 7
- title: eCommerce Funnel
name: eCommerce Funnel
model: thelook
explore: sessions
type: looker_column
fields: [sessions.all_sessions, sessions.count_browse_or_later, sessions.count_product_or_later,
sessions.count_cart_or_later, sessions.count_purchase]
filters: {}
sorts: [sessions.all_sessions desc]
limit: 500
column_limit: 50
query_timezone: America/Los_Angeles
x_axis_gridlines: false
y_axis_gridlines: false
show_view_names: false
show_y_axis_labels: true
show_y_axis_ticks: true
y_axis_tick_density: default
y_axis_tick_density_custom: 5
show_x_axis_label: true
x_axis_label: ''
show_x_axis_ticks: false
y_axis_scale_mode: linear
x_axis_reversed: false
y_axis_reversed: false
plot_size_by_field: false
trellis: ''
stacking: ''
limit_displayed_rows: false
legend_position: center
colors: ["#5245ed", "#a2dcf3", "#776fdf", "#1ea8df", "#49cec1", "#776fdf", "#49cec1",
"#1ea8df", "#a2dcf3", "#776fdf", "#776fdf", "#635189"]
series_types: {}
point_style: circle
show_value_labels: true
label_density: 25
x_axis_scale: auto
y_axis_combined: true
ordering: none
show_null_labels: false
show_dropoff: true
show_totals_labels: false
show_silhouette: false
totals_color: "#808080"
leftAxisLabelVisible: false
leftAxisLabel: ''
rightAxisLabelVisible: true
rightAxisLabel: Sessions
barColors: ["#5245ed", "#49cec1"]
smoothedBars: true
orientation: automatic
labelPosition: left
percentType: total
percentPosition: inline
valuePosition: right
labelColorEnabled: false
labelColor: "#FFF"
show_null_points: true
interpolation: linear
listen:
Browser: events.browser
Traffic Source: users.traffic_source
Date: sessions.session_start_date
row: 3
col: 0
width: 11
height: 7
- title: Global Events
name: Global Events
model: thelook
explore: events
type: looker_map
fields: [events.approx_location, events.count]
filters: {}
sorts: [events.count desc]
limit: 1000
query_timezone: America/Los_Angeles
show_view_names: true
stacking: ''
show_value_labels: false
label_density: 25
legend_position: center
x_axis_gridlines: false
y_axis_gridlines: true
y_axis_combined: true
show_y_axis_labels: true
show_y_axis_ticks: true
y_axis_tick_density: default
y_axis_tick_density_custom: 5
show_x_axis_label: true
show_x_axis_ticks: true
x_axis_scale: auto
ordering: none
show_null_labels: false
loading: false
map_plot_mode: points
heatmap_gridlines: true
map_tile_provider: positron
map_position: fit_data
map_scale_indicator: 'off'
map_marker_type: circle
map_marker_icon_name: default
map_marker_radius_mode: proportional_value
map_marker_units: pixels
map_marker_proportional_scale_type: linear
map_marker_color_mode: fixed
show_legend: true
quantize_map_value_colors: false
map: world
map_projection: ''
quantize_colors: false
colors: [whitesmoke, "#64518A"]
outer_border_color: grey
inner_border_color: lightgrey
map_pannable: true
map_zoomable: true
map_marker_color: ["#1ea8df"]
listen:
Browser: events.browser
Traffic Source: users.traffic_source
Date: events.event_date
row: 10
col: 0
width: 16
height: 8
- title: Daily Session and User Count
name: Daily Session and User Count
model: thelook
explore: sessions
type: looker_line
fields: [sessions.session_start_date, sessions.count, sessions.overall_conversion]
sorts: [sessions.session_start_date]
limit: 500
column_limit: 50
query_timezone: America/Los_Angeles
x_axis_gridlines: false
y_axis_gridlines: true
show_view_names: false
show_y_axis_labels: false
show_y_axis_ticks: true
y_axis_tick_density: default
y_axis_tick_density_custom: 5
show_x_axis_label: false
show_x_axis_ticks: true
y_axis_scale_mode: linear
x_axis_reversed: false
y_axis_reversed: false
plot_size_by_field: false
trellis: ''
stacking: ''
limit_displayed_rows: false
hide_legend: false
legend_position: center
colors: ["#5245ed", "#1ea8df", "#353b49", "#49cec1", "#b3a0dd", "#db7f2a", "#706080",
"#a2dcf3", "#776fdf", "#e9b404", "#635189"]
point_style: circle_outline
show_value_labels: false
label_density: 25
x_axis_scale: auto
y_axis_combined: false
y_axis_orientation: [left, right]
show_null_points: true
interpolation: monotone
discontinuous_nulls: false
show_row_numbers: true
ordering: none
show_null_labels: false
listen:
Browser: events.browser
Traffic Source: users.traffic_source
Date: events.event_date
row: 25
col: 0
width: 24
height: 9
- title: Percent Purchasing Sessions
name: Percent Purchasing Sessions
model: thelook
explore: sessions
type: looker_pie
fields: [sessions.includes_purchase, sessions.count]
filters:
sessions.session_start_date: 7 days
sorts: [sessions.all_sessions desc, sessions.includes_purchase]
limit: 500
column_limit: 50
query_timezone: America/Los_Angeles
show_view_names: true
colors: ["#5245ed", "#a2dcf3"]
show_row_numbers: true
ordering: none
show_null_labels: false
value_labels: legend
label_type: labPer
stacking: normal
show_value_labels: false
label_density: 25
legend_position: center
x_axis_gridlines: false
y_axis_gridlines: true
y_axis_combined: true
show_y_axis_labels: true
show_y_axis_ticks: true
y_axis_tick_density: default
y_axis_tick_density_custom: 5
show_x_axis_label: true
show_x_axis_ticks: true
x_axis_scale: ordinal
point_style: circle_outline
interpolation: linear
discontinuous_nulls: false
show_null_points: true
series_types:
users.count: column
inner_radius: 50
series_labels:
'No': No Purchase
'Yes': Results in Purchase
series_colors: {}
note_state: collapsed
note_display: below
note_text: Percent of unique visits that result in a purchase
listen:
Browser: events.browser
Traffic Source: users.traffic_source
Date: events.event_date
row: 3
col: 11
width: 5
height: 7
filters:
- name: Browser
title: Browser
type: field_filter
default_value: ''
allow_multiple_values: true
required: false
model: thelook
explore: events
listens_to_filters: []
field: events.browser
- name: Traffic Source
title: Traffic Source
type: field_filter
default_value:
allow_multiple_values: true
required: false
model: thelook
explore: events
listens_to_filters: []
field: users.traffic_source
- name: Date
title: Date
type: date_filter
default_value: 2 weeks
allow_multiple_values: true
required: false
| LookML | 4 | Honcharov12/pylookml | lookml/tests/files/the_look/dashboards/web_analytics_overview.dashboard.lookml | [
"MIT"
] |
LITHO,CANAL,CODIGO
002650,A,000263
002651,B,000271
002594,A,000305
002652,C,000083
002653,A,000077
002581,A,000376
002655,A,000350
002654,A,000112
002599,A,000310
002589,A,000279
002590,A,000246
002585,B,000245
002593,A,000254
002600,C,000325
002669,A,000185
002592,A,000104
002670,C,000039
002595,A,000382
002597,A,000056
002584,A,000223
002586,C,000011
002588,B,000384
002587,C,000367
002605,C,000248
002606,C,000001
002583,C,000216
002582,B,000209
002601,A,000225
002596,A,000227
002598,C,000102
002591,C,000105
002603,A,000342
002610,C,000170
002608,A,000142
002607,C,000129
002604,B,000294
002602,A,000152
| IDL | 2 | plandauro/admision | storage/calificacioncepreII/2019-IIidentificacionPE02.dlm | [
"MIT"
] |
instance Eq Int where
instance Eq Int where {}
instance Eq Int
instance Eq Int a where
instance Eq Int a where {}
instance Eq Int a
instance Eq (Maybe a) where
instance Eq (Maybe a) where {}
instance Eq (Maybe a)
instance Eq (a, b, c) where
instance Eq (a, b, c) where {}
instance Eq (a, b, c)
instance Eq [a] where
instance Eq [a] where {}
instance Eq [a]
instance Eq (a -> b) where
instance Eq (a -> b) where {}
instance Eq (a -> b)
instance Eq Foo where
bar (Foo Baz) (Foo Wix) = EQ
instance Eq Foo where {
bar (Foo Baz) (Foo Wix) = EQ
}
instance Eq (,) where
instance Eq (,) where {}
instance Eq (,)
instance Eq (Bar, Baz a b) where
instance Eq (Bar, Baz a b) where {}
instance Eq (Bar, Baz a b)
instance Eq [(Bar, Baz a b)] where
instance Eq [(Bar, Baz a b)] where {}
instance Eq [(Bar, Baz a b)]
instance Eq [Bar] where
instance Eq [Bar] where {}
instance Eq [Bar]
instance Eq [Bar a b] where
instance Eq [Bar a b] where {}
instance Eq [Bar a b]
instance Eq [Bar Baz b] where
instance Eq [Bar Baz b] where {}
instance Eq [Bar Baz b]
instance Show a => Read Int where {}
instance Show a => Read (Maybe a) where {}
instance (Show a, Eq a) => Read (Maybe a) where {}
instance (Foo (Bar [Baz])) => Read (Bar) where {}
instance (Foo (Bar (Baz, Baz))) => Read (Bar) where {}
instance Bar Foo where
baz :: Num b => a -> b -> a
baz' :: (Num a, Num b) => Maybe a -> Either String b -> Maybe (Either String a)
instance Foo a b m => Bar a b m where {}
instance ( Foo (Bar a b '[]) c ) => Baz a b (Bix a b c) where
instance (Foo baz ~ bar) => Wix baz where
toWix = undefined
Wix baz <> Wix baz' = Wix (baz <> baz')
instance Foo (f :+: g) where {}
instance (B :< a, A :< b) => Foo (A b) Bar where {}
instance Bar Foo where
foo F.D{..} = bar
instance Show Bar where
bar (Bar Baz) (Bar Baz) = EQ
instance forall location b c. (Show b, Monad c) => ErrorMonad (Value b) c where
unit = pure . Unit
instance Show B where c = d
-- a
instance Show B where c = d
| Haskell | 1 | matsubara0507/semantic | test/fixtures/haskell/corpus/type-class-instance-declarations.B.hs | [
"MIT"
] |
# Recipe: stages a Google Cloud Platform configuration script into the deploy
# directory, substituting project/region/registry identifiers supplied as
# bitbake (or environment) variables.
SUMMARY = "Google Cloud Platform Configuration"
LICENSE = "Apache-2.0"
LIC_FILES_CHKSUM = "file://LICENSE;md5=0ea4e253cc22ddc22117b9796e5ce5b7"
# Look in this recipe's files/ subdirectory first when resolving SRC_URI entries.
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
SRC_URI = "file://gcp-config.sh file://LICENSE"
S = "${WORKDIR}"
inherit deploy
do_deploy() {
# Fail fast if any required identifier is unset; otherwise the sed
# substitutions below would silently produce empty placeholders.
if [ -z "${PROJECT_ID}" ]; then
echo "Error. PROJECT_ID bitbake/shell variable unset." >&2
exit 1
fi
if [ -z "${REGION_ID}" ]; then
echo "Error. REGION_ID bitbake/shell variable unset." >&2
exit 1
fi
if [ -z "${REGISTRY_ID}" ]; then
echo "Error. REGISTRY_ID bitbake/shell variable unset." >&2
exit 1
fi
# Install the script (owner-only permissions: 0700) and replace the
# @PROJECT_ID@ / @REGION_ID@ / @REGISTRY_ID@ placeholders in place.
install -d ${DEPLOYDIR}/persist/gcp
install -m 0700 ${WORKDIR}/gcp-config.sh ${DEPLOYDIR}/persist/gcp
sed -i -e 's,@PROJECT_ID@,${PROJECT_ID},g' \
-e 's,@REGION_ID@,${REGION_ID},g' \
-e 's,@REGISTRY_ID@,${REGISTRY_ID},g' \
${DEPLOYDIR}/persist/gcp/gcp-config.sh
}
# Run the deploy task between install and package in the normal task chain.
addtask do_deploy after do_install before do_package
# gcp-config.sh is executed with bash at runtime.
RDEPENDS_${PN} += "bash"
# Allow packaging even though the package itself installs no files
# (the payload goes to DEPLOYDIR, not the package tree).
ALLOW_EMPTY_${PN} = "1"
| BitBake | 4 | tjwebb/community | tutorials/cloud-iot-mender-ota/image/meta-gcp-iot/recipes-gcp/gcp-config/gcp-config_1.0.bb | [
"Apache-2.0",
"CC-BY-4.0"
] |
// Exercises the parameterized Array type: element assignment, read-back,
// and size reporting for both Str and Int element types.
class ArrayTest {
// Creates a 2-slot Array<Str>, stores and re-reads a string, echoes
// the value and the array size.
static Void testStr() {
x := Array<Str>(2)
x[0] = "a"
t := x[0]
echo(t)
s := x.size
echo(s)
}
// Same exercise as testStr but with an Int element type.
static Void testInt() {
x := Array<Int>(2)
x[0] = 1
t := x[0]
echo(t)
s := x.size
echo(s)
}
}
} | Fantom | 3 | fanx-dev/fanx | library/baseTest/fan/ArrayTest.fan | [
"AFL-3.0"
] |
#include <metal_stdlib>
#include "OperationShaderTypes.h"
using namespace metal;
// Uniform block carrying the number of discrete levels per color channel.
typedef struct {
float colorLevels;
} PosterizeUniform;
// Posterize effect: quantizes each channel of the sampled color to
// `colorLevels` discrete steps, rounding to the nearest step
// (floor(x * levels + 0.5) / levels). Alpha is quantized along with RGB.
fragment half4 posterizeFragment(SingleInputVertexIO fragmentInput [[stage_in]],
texture2d<half> inputTexture [[texture(0)]],
constant PosterizeUniform& uniform [[buffer(1)]])
{
// Default-configured sampler; coordinates come from the vertex stage.
constexpr sampler quadSampler;
half4 color = inputTexture.sample(quadSampler, fragmentInput.textureCoordinate);
half colorLevels = half(uniform.colorLevels);
return floor((color * colorLevels) + half4(0.5)) / colorLevels;
}
| Metal | 4 | luoxiao/GPUImage3 | framework/Source/Operations/Posterize.metal | [
"BSD-3-Clause"
] |
name = simDERADC
time_delta = 1s
broker = tcp://localhost:5570
values
GLD/house0_l106_tm_mass_heat_coeff
GLD/house0_l106_tm_UA
GLD/house0_l106_tm_heating_system_type
GLD/house0_l106_tm_cooling_system_type
GLD/house0_l106_tm_auxiliary_system_type
GLD/house0_l106_tm_air_heat_capacity
GLD/house0_l106_tm_mass_heat_capacity
GLD/house0_l106_tm_design_heating_capacity
GLD/house0_l106_tm_design_cooling_capacity
GLD/house0_l106_tm_auxiliary_heat_capacity
GLD/house0_l106_tm_thermostat_deadband
GLD/house0_l106_tm_Qi
GLD/house0_l106_tm_Qa
GLD/house0_l106_tm_Qm
GLD/house0_l106_tm_Qh
GLD/house0_l106_tm_outdoor_temperature
GLD/house0_l106_tm_air_temperature
GLD/house0_l106_tm_mass_temperature
GLD/house0_l106_tm_thermostat_mode
GLD/house1_l106_tm_mass_heat_coeff
GLD/house1_l106_tm_UA
GLD/house1_l106_tm_heating_system_type
GLD/house1_l106_tm_cooling_system_type
GLD/house1_l106_tm_auxiliary_system_type
GLD/house1_l106_tm_air_heat_capacity
GLD/house1_l106_tm_mass_heat_capacity
GLD/house1_l106_tm_design_heating_capacity
GLD/house1_l106_tm_design_cooling_capacity
GLD/house1_l106_tm_auxiliary_heat_capacity
GLD/house1_l106_tm_thermostat_deadband
GLD/house1_l106_tm_Qi
GLD/house1_l106_tm_Qa
GLD/house1_l106_tm_Qm
GLD/house1_l106_tm_Qh
GLD/house1_l106_tm_outdoor_temperature
GLD/house1_l106_tm_air_temperature
GLD/house1_l106_tm_mass_temperature
GLD/house1_l106_tm_thermostat_mode
GLD/house2_l106_tm_mass_heat_coeff
GLD/house2_l106_tm_UA
GLD/house2_l106_tm_heating_system_type
GLD/house2_l106_tm_cooling_system_type
GLD/house2_l106_tm_auxiliary_system_type
GLD/house2_l106_tm_air_heat_capacity
GLD/house2_l106_tm_mass_heat_capacity
GLD/house2_l106_tm_design_heating_capacity
GLD/house2_l106_tm_design_cooling_capacity
GLD/house2_l106_tm_auxiliary_heat_capacity
GLD/house2_l106_tm_thermostat_deadband
GLD/house2_l106_tm_Qi
GLD/house2_l106_tm_Qa
GLD/house2_l106_tm_Qm
GLD/house2_l106_tm_Qh
GLD/house2_l106_tm_outdoor_temperature
GLD/house2_l106_tm_air_temperature
GLD/house2_l106_tm_mass_temperature
GLD/house2_l106_tm_thermostat_mode
GLD/house3_l106_tm_mass_heat_coeff
GLD/house3_l106_tm_UA
GLD/house3_l106_tm_heating_system_type
GLD/house3_l106_tm_cooling_system_type
GLD/house3_l106_tm_auxiliary_system_type
GLD/house3_l106_tm_air_heat_capacity
GLD/house3_l106_tm_mass_heat_capacity
GLD/house3_l106_tm_design_heating_capacity
GLD/house3_l106_tm_design_cooling_capacity
GLD/house3_l106_tm_auxiliary_heat_capacity
GLD/house3_l106_tm_thermostat_deadband
GLD/house3_l106_tm_Qi
GLD/house3_l106_tm_Qa
GLD/house3_l106_tm_Qm
GLD/house3_l106_tm_Qh
GLD/house3_l106_tm_outdoor_temperature
GLD/house3_l106_tm_air_temperature
GLD/house3_l106_tm_mass_temperature
GLD/house3_l106_tm_thermostat_mode
GLD/house4_l106_tm_mass_heat_coeff
GLD/house4_l106_tm_UA
GLD/house4_l106_tm_heating_system_type
GLD/house4_l106_tm_cooling_system_type
GLD/house4_l106_tm_auxiliary_system_type
GLD/house4_l106_tm_air_heat_capacity
GLD/house4_l106_tm_mass_heat_capacity
GLD/house4_l106_tm_design_heating_capacity
GLD/house4_l106_tm_design_cooling_capacity
GLD/house4_l106_tm_auxiliary_heat_capacity
GLD/house4_l106_tm_thermostat_deadband
GLD/house4_l106_tm_Qi
GLD/house4_l106_tm_Qa
GLD/house4_l106_tm_Qm
GLD/house4_l106_tm_Qh
GLD/house4_l106_tm_outdoor_temperature
GLD/house4_l106_tm_air_temperature
GLD/house4_l106_tm_mass_temperature
GLD/house4_l106_tm_thermostat_mode
GLD/house5_l106_tm_mass_heat_coeff
GLD/house5_l106_tm_UA
GLD/house5_l106_tm_heating_system_type
GLD/house5_l106_tm_cooling_system_type
GLD/house5_l106_tm_auxiliary_system_type
GLD/house5_l106_tm_air_heat_capacity
GLD/house5_l106_tm_mass_heat_capacity
GLD/house5_l106_tm_design_heating_capacity
GLD/house5_l106_tm_design_cooling_capacity
GLD/house5_l106_tm_auxiliary_heat_capacity
GLD/house5_l106_tm_thermostat_deadband
GLD/house5_l106_tm_Qi
GLD/house5_l106_tm_Qa
GLD/house5_l106_tm_Qm
GLD/house5_l106_tm_Qh
GLD/house5_l106_tm_outdoor_temperature
GLD/house5_l106_tm_air_temperature
GLD/house5_l106_tm_mass_temperature
GLD/house5_l106_tm_thermostat_mode
GLD/house6_l106_tm_mass_heat_coeff
GLD/house6_l106_tm_UA
GLD/house6_l106_tm_heating_system_type
GLD/house6_l106_tm_cooling_system_type
GLD/house6_l106_tm_auxiliary_system_type
GLD/house6_l106_tm_air_heat_capacity
GLD/house6_l106_tm_mass_heat_capacity
GLD/house6_l106_tm_design_heating_capacity
GLD/house6_l106_tm_design_cooling_capacity
GLD/house6_l106_tm_auxiliary_heat_capacity
GLD/house6_l106_tm_thermostat_deadband
GLD/house6_l106_tm_Qi
GLD/house6_l106_tm_Qa
GLD/house6_l106_tm_Qm
GLD/house6_l106_tm_Qh
GLD/house6_l106_tm_outdoor_temperature
GLD/house6_l106_tm_air_temperature
GLD/house6_l106_tm_mass_temperature
GLD/house6_l106_tm_thermostat_mode
GLD/house7_l106_tm_mass_heat_coeff
GLD/house7_l106_tm_UA
GLD/house7_l106_tm_heating_system_type
GLD/house7_l106_tm_cooling_system_type
GLD/house7_l106_tm_auxiliary_system_type
GLD/house7_l106_tm_air_heat_capacity
GLD/house7_l106_tm_mass_heat_capacity
GLD/house7_l106_tm_design_heating_capacity
GLD/house7_l106_tm_design_cooling_capacity
GLD/house7_l106_tm_auxiliary_heat_capacity
GLD/house7_l106_tm_thermostat_deadband
GLD/house7_l106_tm_Qi
GLD/house7_l106_tm_Qa
GLD/house7_l106_tm_Qm
GLD/house7_l106_tm_Qh
GLD/house7_l106_tm_outdoor_temperature
GLD/house7_l106_tm_air_temperature
GLD/house7_l106_tm_mass_temperature
GLD/house7_l106_tm_thermostat_mode
GLD/house8_l106_tm_mass_heat_coeff
GLD/house8_l106_tm_UA
GLD/house8_l106_tm_heating_system_type
GLD/house8_l106_tm_cooling_system_type
GLD/house8_l106_tm_auxiliary_system_type
GLD/house8_l106_tm_air_heat_capacity
GLD/house8_l106_tm_mass_heat_capacity
GLD/house8_l106_tm_design_heating_capacity
GLD/house8_l106_tm_design_cooling_capacity
GLD/house8_l106_tm_auxiliary_heat_capacity
GLD/house8_l106_tm_thermostat_deadband
GLD/house8_l106_tm_Qi
GLD/house8_l106_tm_Qa
GLD/house8_l106_tm_Qm
GLD/house8_l106_tm_Qh
GLD/house8_l106_tm_outdoor_temperature
GLD/house8_l106_tm_air_temperature
GLD/house8_l106_tm_mass_temperature
GLD/house8_l106_tm_thermostat_mode
GLD/house9_l106_tm_mass_heat_coeff
GLD/house9_l106_tm_UA
GLD/house9_l106_tm_heating_system_type
GLD/house9_l106_tm_cooling_system_type
GLD/house9_l106_tm_auxiliary_system_type
GLD/house9_l106_tm_air_heat_capacity
GLD/house9_l106_tm_mass_heat_capacity
GLD/house9_l106_tm_design_heating_capacity
GLD/house9_l106_tm_design_cooling_capacity
GLD/house9_l106_tm_auxiliary_heat_capacity
GLD/house9_l106_tm_thermostat_deadband
GLD/house9_l106_tm_Qi
GLD/house9_l106_tm_Qa
GLD/house9_l106_tm_Qm
GLD/house9_l106_tm_Qh
GLD/house9_l106_tm_outdoor_temperature
GLD/house9_l106_tm_air_temperature
GLD/house9_l106_tm_mass_temperature
GLD/house9_l106_tm_thermostat_mode
GLD/house10_l106_tm_mass_heat_coeff
GLD/house10_l106_tm_UA
GLD/house10_l106_tm_heating_system_type
GLD/house10_l106_tm_cooling_system_type
GLD/house10_l106_tm_auxiliary_system_type
GLD/house10_l106_tm_air_heat_capacity
GLD/house10_l106_tm_mass_heat_capacity
GLD/house10_l106_tm_design_heating_capacity
GLD/house10_l106_tm_design_cooling_capacity
GLD/house10_l106_tm_auxiliary_heat_capacity
GLD/house10_l106_tm_thermostat_deadband
GLD/house10_l106_tm_Qi
GLD/house10_l106_tm_Qa
GLD/house10_l106_tm_Qm
GLD/house10_l106_tm_Qh
GLD/house10_l106_tm_outdoor_temperature
GLD/house10_l106_tm_air_temperature
GLD/house10_l106_tm_mass_temperature
GLD/house10_l106_tm_thermostat_mode
GLD/house11_l106_tm_mass_heat_coeff
GLD/house11_l106_tm_UA
GLD/house11_l106_tm_heating_system_type
GLD/house11_l106_tm_cooling_system_type
GLD/house11_l106_tm_auxiliary_system_type
GLD/house11_l106_tm_air_heat_capacity
GLD/house11_l106_tm_mass_heat_capacity
GLD/house11_l106_tm_design_heating_capacity
GLD/house11_l106_tm_design_cooling_capacity
GLD/house11_l106_tm_auxiliary_heat_capacity
GLD/house11_l106_tm_thermostat_deadband
GLD/house11_l106_tm_Qi
GLD/house11_l106_tm_Qa
GLD/house11_l106_tm_Qm
GLD/house11_l106_tm_Qh
GLD/house11_l106_tm_outdoor_temperature
GLD/house11_l106_tm_air_temperature
GLD/house11_l106_tm_mass_temperature
GLD/house11_l106_tm_thermostat_mode
GLD/house12_l106_tm_mass_heat_coeff
GLD/house12_l106_tm_UA
GLD/house12_l106_tm_heating_system_type
GLD/house12_l106_tm_cooling_system_type
GLD/house12_l106_tm_auxiliary_system_type
GLD/house12_l106_tm_air_heat_capacity
GLD/house12_l106_tm_mass_heat_capacity
GLD/house12_l106_tm_design_heating_capacity
GLD/house12_l106_tm_design_cooling_capacity
GLD/house12_l106_tm_auxiliary_heat_capacity
GLD/house12_l106_tm_thermostat_deadband
GLD/house12_l106_tm_Qi
GLD/house12_l106_tm_Qa
GLD/house12_l106_tm_Qm
GLD/house12_l106_tm_Qh
GLD/house12_l106_tm_outdoor_temperature
GLD/house12_l106_tm_air_temperature
GLD/house12_l106_tm_mass_temperature
GLD/house12_l106_tm_thermostat_mode
GLD/house13_l106_tm_mass_heat_coeff
GLD/house13_l106_tm_UA
GLD/house13_l106_tm_heating_system_type
GLD/house13_l106_tm_cooling_system_type
GLD/house13_l106_tm_auxiliary_system_type
GLD/house13_l106_tm_air_heat_capacity
GLD/house13_l106_tm_mass_heat_capacity
GLD/house13_l106_tm_design_heating_capacity
GLD/house13_l106_tm_design_cooling_capacity
GLD/house13_l106_tm_auxiliary_heat_capacity
GLD/house13_l106_tm_thermostat_deadband
GLD/house13_l106_tm_Qi
GLD/house13_l106_tm_Qa
GLD/house13_l106_tm_Qm
GLD/house13_l106_tm_Qh
GLD/house13_l106_tm_outdoor_temperature
GLD/house13_l106_tm_air_temperature
GLD/house13_l106_tm_mass_temperature
GLD/house13_l106_tm_thermostat_mode
GLD/house14_l106_tm_mass_heat_coeff
GLD/house14_l106_tm_UA
GLD/house14_l106_tm_heating_system_type
GLD/house14_l106_tm_cooling_system_type
GLD/house14_l106_tm_auxiliary_system_type
GLD/house14_l106_tm_air_heat_capacity
GLD/house14_l106_tm_mass_heat_capacity
GLD/house14_l106_tm_design_heating_capacity
GLD/house14_l106_tm_design_cooling_capacity
GLD/house14_l106_tm_auxiliary_heat_capacity
GLD/house14_l106_tm_thermostat_deadband
GLD/house14_l106_tm_Qi
GLD/house14_l106_tm_Qa
GLD/house14_l106_tm_Qm
GLD/house14_l106_tm_Qh
GLD/house14_l106_tm_outdoor_temperature
GLD/house14_l106_tm_air_temperature
GLD/house14_l106_tm_mass_temperature
GLD/house14_l106_tm_thermostat_mode
GLD/house0_l107_tm_mass_heat_coeff
GLD/house0_l107_tm_UA
GLD/house0_l107_tm_heating_system_type
GLD/house0_l107_tm_cooling_system_type
GLD/house0_l107_tm_auxiliary_system_type
GLD/house0_l107_tm_air_heat_capacity
GLD/house0_l107_tm_mass_heat_capacity
GLD/house0_l107_tm_design_heating_capacity
GLD/house0_l107_tm_design_cooling_capacity
GLD/house0_l107_tm_auxiliary_heat_capacity
GLD/house0_l107_tm_thermostat_deadband
GLD/house0_l107_tm_Qi
GLD/house0_l107_tm_Qa
GLD/house0_l107_tm_Qm
GLD/house0_l107_tm_Qh
GLD/house0_l107_tm_outdoor_temperature
GLD/house0_l107_tm_air_temperature
GLD/house0_l107_tm_mass_temperature
GLD/house0_l107_tm_thermostat_mode
GLD/house1_l107_tm_mass_heat_coeff
GLD/house1_l107_tm_UA
GLD/house1_l107_tm_heating_system_type
GLD/house1_l107_tm_cooling_system_type
GLD/house1_l107_tm_auxiliary_system_type
GLD/house1_l107_tm_air_heat_capacity
GLD/house1_l107_tm_mass_heat_capacity
GLD/house1_l107_tm_design_heating_capacity
GLD/house1_l107_tm_design_cooling_capacity
GLD/house1_l107_tm_auxiliary_heat_capacity
GLD/house1_l107_tm_thermostat_deadband
GLD/house1_l107_tm_Qi
GLD/house1_l107_tm_Qa
GLD/house1_l107_tm_Qm
GLD/house1_l107_tm_Qh
GLD/house1_l107_tm_outdoor_temperature
GLD/house1_l107_tm_air_temperature
GLD/house1_l107_tm_mass_temperature
GLD/house1_l107_tm_thermostat_mode
GLD/house2_l107_tm_mass_heat_coeff
GLD/house2_l107_tm_UA
GLD/house2_l107_tm_heating_system_type
GLD/house2_l107_tm_cooling_system_type
GLD/house2_l107_tm_auxiliary_system_type
GLD/house2_l107_tm_air_heat_capacity
GLD/house2_l107_tm_mass_heat_capacity
GLD/house2_l107_tm_design_heating_capacity
GLD/house2_l107_tm_design_cooling_capacity
GLD/house2_l107_tm_auxiliary_heat_capacity
GLD/house2_l107_tm_thermostat_deadband
GLD/house2_l107_tm_Qi
GLD/house2_l107_tm_Qa
GLD/house2_l107_tm_Qm
GLD/house2_l107_tm_Qh
GLD/house2_l107_tm_outdoor_temperature
GLD/house2_l107_tm_air_temperature
GLD/house2_l107_tm_mass_temperature
GLD/house2_l107_tm_thermostat_mode
GLD/house4_l107_tm_mass_heat_coeff
GLD/house4_l107_tm_UA
GLD/house4_l107_tm_heating_system_type
GLD/house4_l107_tm_cooling_system_type
GLD/house4_l107_tm_auxiliary_system_type
GLD/house4_l107_tm_air_heat_capacity
GLD/house4_l107_tm_mass_heat_capacity
GLD/house4_l107_tm_design_heating_capacity
GLD/house4_l107_tm_design_cooling_capacity
GLD/house4_l107_tm_auxiliary_heat_capacity
GLD/house4_l107_tm_thermostat_deadband
GLD/house4_l107_tm_Qi
GLD/house4_l107_tm_Qa
GLD/house4_l107_tm_Qm
GLD/house4_l107_tm_Qh
GLD/house4_l107_tm_outdoor_temperature
GLD/house4_l107_tm_air_temperature
GLD/house4_l107_tm_mass_temperature
GLD/house4_l107_tm_thermostat_mode
GLD/house5_l107_tm_mass_heat_coeff
GLD/house5_l107_tm_UA
GLD/house5_l107_tm_heating_system_type
GLD/house5_l107_tm_cooling_system_type
GLD/house5_l107_tm_auxiliary_system_type
GLD/house5_l107_tm_air_heat_capacity
GLD/house5_l107_tm_mass_heat_capacity
GLD/house5_l107_tm_design_heating_capacity
GLD/house5_l107_tm_design_cooling_capacity
GLD/house5_l107_tm_auxiliary_heat_capacity
GLD/house5_l107_tm_thermostat_deadband
GLD/house5_l107_tm_Qi
GLD/house5_l107_tm_Qa
GLD/house5_l107_tm_Qm
GLD/house5_l107_tm_Qh
GLD/house5_l107_tm_outdoor_temperature
GLD/house5_l107_tm_air_temperature
GLD/house5_l107_tm_mass_temperature
GLD/house5_l107_tm_thermostat_mode
GLD/house6_l107_tm_mass_heat_coeff
GLD/house6_l107_tm_UA
GLD/house6_l107_tm_heating_system_type
GLD/house6_l107_tm_cooling_system_type
GLD/house6_l107_tm_auxiliary_system_type
GLD/house6_l107_tm_air_heat_capacity
GLD/house6_l107_tm_mass_heat_capacity
GLD/house6_l107_tm_design_heating_capacity
GLD/house6_l107_tm_design_cooling_capacity
GLD/house6_l107_tm_auxiliary_heat_capacity
GLD/house6_l107_tm_thermostat_deadband
GLD/house6_l107_tm_Qi
GLD/house6_l107_tm_Qa
GLD/house6_l107_tm_Qm
GLD/house6_l107_tm_Qh
GLD/house6_l107_tm_outdoor_temperature
GLD/house6_l107_tm_air_temperature
GLD/house6_l107_tm_mass_temperature
GLD/house6_l107_tm_thermostat_mode
GLD/house7_l107_tm_mass_heat_coeff
GLD/house7_l107_tm_UA
GLD/house7_l107_tm_heating_system_type
GLD/house7_l107_tm_cooling_system_type
GLD/house7_l107_tm_auxiliary_system_type
GLD/house7_l107_tm_air_heat_capacity
GLD/house7_l107_tm_mass_heat_capacity
GLD/house7_l107_tm_design_heating_capacity
GLD/house7_l107_tm_design_cooling_capacity
GLD/house7_l107_tm_auxiliary_heat_capacity
GLD/house7_l107_tm_thermostat_deadband
GLD/house7_l107_tm_Qi
GLD/house7_l107_tm_Qa
GLD/house7_l107_tm_Qm
GLD/house7_l107_tm_Qh
GLD/house7_l107_tm_outdoor_temperature
GLD/house7_l107_tm_air_temperature
GLD/house7_l107_tm_mass_temperature
GLD/house7_l107_tm_thermostat_mode
GLD/house8_l107_tm_mass_heat_coeff
GLD/house8_l107_tm_UA
GLD/house8_l107_tm_heating_system_type
GLD/house8_l107_tm_cooling_system_type
GLD/house8_l107_tm_auxiliary_system_type
GLD/house8_l107_tm_air_heat_capacity
GLD/house8_l107_tm_mass_heat_capacity
GLD/house8_l107_tm_design_heating_capacity
GLD/house8_l107_tm_design_cooling_capacity
GLD/house8_l107_tm_auxiliary_heat_capacity
GLD/house8_l107_tm_thermostat_deadband
GLD/house8_l107_tm_Qi
GLD/house8_l107_tm_Qa
GLD/house8_l107_tm_Qm
GLD/house8_l107_tm_Qh
GLD/house8_l107_tm_outdoor_temperature
GLD/house8_l107_tm_air_temperature
GLD/house8_l107_tm_mass_temperature
GLD/house8_l107_tm_thermostat_mode
GLD/house9_l107_tm_mass_heat_coeff
GLD/house9_l107_tm_UA
GLD/house9_l107_tm_heating_system_type
GLD/house9_l107_tm_cooling_system_type
GLD/house9_l107_tm_auxiliary_system_type
GLD/house9_l107_tm_air_heat_capacity
GLD/house9_l107_tm_mass_heat_capacity
GLD/house9_l107_tm_design_heating_capacity
GLD/house9_l107_tm_design_cooling_capacity
GLD/house9_l107_tm_auxiliary_heat_capacity
GLD/house9_l107_tm_thermostat_deadband
GLD/house9_l107_tm_Qi
GLD/house9_l107_tm_Qa
GLD/house9_l107_tm_Qm
GLD/house9_l107_tm_Qh
GLD/house9_l107_tm_outdoor_temperature
GLD/house9_l107_tm_air_temperature
GLD/house9_l107_tm_mass_temperature
GLD/house9_l107_tm_thermostat_mode
GLD/house10_l107_tm_mass_heat_coeff
GLD/house10_l107_tm_UA
GLD/house10_l107_tm_heating_system_type
GLD/house10_l107_tm_cooling_system_type
GLD/house10_l107_tm_auxiliary_system_type
GLD/house10_l107_tm_air_heat_capacity
GLD/house10_l107_tm_mass_heat_capacity
GLD/house10_l107_tm_design_heating_capacity
GLD/house10_l107_tm_design_cooling_capacity
GLD/house10_l107_tm_auxiliary_heat_capacity
GLD/house10_l107_tm_thermostat_deadband
GLD/house10_l107_tm_Qi
GLD/house10_l107_tm_Qa
GLD/house10_l107_tm_Qm
GLD/house10_l107_tm_Qh
GLD/house10_l107_tm_outdoor_temperature
GLD/house10_l107_tm_air_temperature
GLD/house10_l107_tm_mass_temperature
GLD/house10_l107_tm_thermostat_mode
GLD/house11_l107_tm_mass_heat_coeff
GLD/house11_l107_tm_UA
GLD/house11_l107_tm_heating_system_type
GLD/house11_l107_tm_cooling_system_type
GLD/house11_l107_tm_auxiliary_system_type
GLD/house11_l107_tm_air_heat_capacity
GLD/house11_l107_tm_mass_heat_capacity
GLD/house11_l107_tm_design_heating_capacity
GLD/house11_l107_tm_design_cooling_capacity
GLD/house11_l107_tm_auxiliary_heat_capacity
GLD/house11_l107_tm_thermostat_deadband
GLD/house11_l107_tm_Qi
GLD/house11_l107_tm_Qa
GLD/house11_l107_tm_Qm
GLD/house11_l107_tm_Qh
GLD/house11_l107_tm_outdoor_temperature
GLD/house11_l107_tm_air_temperature
GLD/house11_l107_tm_mass_temperature
GLD/house11_l107_tm_thermostat_mode
GLD/house12_l107_tm_mass_heat_coeff
GLD/house12_l107_tm_UA
GLD/house12_l107_tm_heating_system_type
GLD/house12_l107_tm_cooling_system_type
GLD/house12_l107_tm_auxiliary_system_type
GLD/house12_l107_tm_air_heat_capacity
GLD/house12_l107_tm_mass_heat_capacity
GLD/house12_l107_tm_design_heating_capacity
GLD/house12_l107_tm_design_cooling_capacity
GLD/house12_l107_tm_auxiliary_heat_capacity
GLD/house12_l107_tm_thermostat_deadband
GLD/house12_l107_tm_Qi
GLD/house12_l107_tm_Qa
GLD/house12_l107_tm_Qm
GLD/house12_l107_tm_Qh
GLD/house12_l107_tm_outdoor_temperature
GLD/house12_l107_tm_air_temperature
GLD/house12_l107_tm_mass_temperature
GLD/house12_l107_tm_thermostat_mode
GLD/house13_l107_tm_mass_heat_coeff
GLD/house13_l107_tm_UA
GLD/house13_l107_tm_heating_system_type
GLD/house13_l107_tm_cooling_system_type
GLD/house13_l107_tm_auxiliary_system_type
GLD/house13_l107_tm_air_heat_capacity
GLD/house13_l107_tm_mass_heat_capacity
GLD/house13_l107_tm_design_heating_capacity
GLD/house13_l107_tm_design_cooling_capacity
GLD/house13_l107_tm_auxiliary_heat_capacity
GLD/house13_l107_tm_thermostat_deadband
GLD/house13_l107_tm_Qi
GLD/house13_l107_tm_Qa
GLD/house13_l107_tm_Qm
GLD/house13_l107_tm_Qh
GLD/house13_l107_tm_outdoor_temperature
GLD/house13_l107_tm_air_temperature
GLD/house13_l107_tm_mass_temperature
GLD/house13_l107_tm_thermostat_mode
| Zimpl | 1 | bishnubhattarai/PFO-ADC-DER-Testbed | related/shared_lib_test/MyProduct/MyProduct/for_testing/fncs.zpl | [
"BSD-2-Clause"
] |
#!/bin/bash
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Script for helping to record method for building the RBE docker images.
#
# The first argument to the script is expected to be the name of the docker file
# to build. Example:
#
# $ ./build_rbe.sh Dockerfile.rbe.ubuntu16.04-manylinux2010
function main() {
  set -eu

  # Fail with a usage message instead of the cryptic "unbound variable"
  # error that `set -u` would otherwise produce when no argument is given.
  if [[ $# -lt 1 ]]; then
    echo "usage: ${0##*/} Dockerfile.rbe.<image-name-suffix>" >&2
    exit 1
  fi

  # Run from the script's own directory so both the Dockerfile lookup and
  # the build context submitted to Cloud Build ('.') are independent of the
  # caller's working directory.
  cd "${0%/*}"

  # Normalize to the bare filename so a path-qualified argument
  # (e.g. tools/Dockerfile.rbe.foo) resolves the same as a bare one.
  local DOCKERFILE
  DOCKERFILE="$(basename "$1")"
  if [[ ! -e "$DOCKERFILE" ]]; then
    echo "$DOCKERFILE does not exist in $PWD" >&2
    exit 1
  fi

  # The image name is derived from the Dockerfile suffix, e.g.
  # Dockerfile.rbe.ubuntu16.04-manylinux2010 -> nosla-ubuntu16.04-manylinux2010.
  # If stripping the prefix changed nothing, the filename is malformed.
  local IMAGE_NAME_SUFFIX="${DOCKERFILE#Dockerfile.rbe.}"
  if [[ "$IMAGE_NAME_SUFFIX" == "$DOCKERFILE" ]]; then
    echo 'File must start with "Dockerfile.rbe."' >&2
    exit 1
  fi

  local ARGS=(
    --config=cloudbuild.yaml
    --machine-type=n1-highcpu-32
    --substitutions=_DOCKERFILE="$DOCKERFILE",_IMAGE_NAME="nosla-$IMAGE_NAME_SUFFIX"
    --timeout=1h
  )
  gcloud --project=tensorflow-testing builds submit "${ARGS[@]}" .
}
main "$@"
| Shell | 4 | yage99/tensorflow | tensorflow/tools/ci_build/build_rbe.sh | [
"Apache-2.0"
] |
package tastytest
/** contains transitive erasure of opaque types: Entity -> Id -> Long */
object EntityDb {
  import Ids._, Entities._

  object Ids {
    // Opaque handle for a stored identity; erases to Long at runtime.
    opaque type Id = Long

    object Id {
      // Counter backing nextId; starts at 0 and only ever increases.
      // NOTE(review): plain var, so id allocation is not thread-safe.
      private var _nextId = 0L

      // Allocates and returns a fresh Id (post-increments the counter).
      def nextId: Id = {
        val id = _nextId
        _nextId += 1
        id
      }

      // Extension surface: the only sanctioned way to observe the
      // underlying Long outside this object's opaque-type scope.
      final implicit class IdOps(val id: Id) extends AnyVal {
        def toLong: Long = id
      }
    }
  }

  object Entities {
    // A typed entity reference; erases to Id (and hence, transitively,
    // to Long). The phantom parameter T tags what kind of entity it is.
    opaque type Entity[+T] = Id

    object Entity {
      // Tags an Id with the singleton type of `kind`; purely a
      // compile-time relabelling, the runtime value is the Id itself.
      def ofKind[T <: Singleton](id: Id)(kind: T): Entity[kind.type] = id

      // Recovers the underlying Id from an entity reference.
      final implicit class EntityOps[T](val entity: Entity[T]) extends AnyVal {
        def id: Id = entity
      }
    }
  }

  object Data {
    // Marker type for person entities; erases to the Kind.OfPerson singleton.
    opaque type Person = Kind.OfPerson.type
    private enum Kind { case OfPerson }

    // Records a person's fields under `id` and returns a typed reference.
    def newPerson(id: Id)(name: String, age: Int): Entity[Person] =
      personName(id.toLong) = name
      personAge(id.toLong) = age
      Entity.ofKind(id)(Kind.OfPerson)

    // Field accessors for person entities, keyed by the entity's raw id.
    // NOTE(review): lookups throw if the id was never registered via newPerson.
    final implicit class PersonOps(val person: Entity[Person]) extends AnyVal {
      def name: String = personName(person.id.toLong)
      def age: Int = personAge(person.id.toLong)
    }

    // In-memory column stores for person fields, keyed by raw Long id.
    private val personName = collection.mutable.LongMap.empty[String]
    private val personAge = collection.mutable.LongMap.empty[Int]
  }
}
| Scala | 4 | stefan-jurco/scala | test/tasty/run/src-3/tastytest/EntityDb.scala | [
"Apache-2.0"
] |
fileFormatVersion: 2
guid: 6e74971328614f4e8ecb0a9e39610d40
timeCreated: 1614328670 | Unity3D Asset | 0 | danyow/xLua | Assets/XLua/Src/LuaAsset.cs.meta | [
"BSD-3-Clause"
] |
-- Keyspace for the vehicle-inventory example, replicated 3 ways within a
-- single datacenter ('datacenter1') using NetworkTopologyStrategy.
CREATE KEYSPACE inventory
WITH replication = {
    'class' : 'NetworkTopologyStrategy',
    'datacenter1' : 3
};

use inventory;

-- Vehicles are keyed by their VIN; remaining columns are plain attributes.
CREATE TABLE vehicles (
    vin text PRIMARY KEY,
    year int,
    make varchar,
    model varchar
);

-- cqlsh-only command (not CQL): require a majority of local-DC replicas to
-- acknowledge the writes below.
consistency LOCAL_QUORUM;

-- Seed data.
insert into vehicles (vin, year, make, model) values ('387KSJHFK23874GH', 2020, 'Ford', 'F-150');
insert into vehicles (vin, year, make, model) values ('534HNDHFK23873EF', 2020, 'Honda', 'Accord');
insert into vehicles (vin, year, make, model) values ('953TOYJEK23853DB', 2020, 'Toyota', 'Camry');
"MIT"
] |
= ProofObjects : The Curry-Howard Correspondence
> module ProofObjects
>
\say{\textit{Algorithms are the computational content of proofs.}}
-- Robert Harper
> import Logic
> import IndProp
>
We have seen that Idris has mechanisms both for _programming_, using inductive
data types like \idr{Nat} or \idr{List} and functions over these types, and for
_proving_ properties of these programs, using inductive propositions (like
\idr{Ev}), implication, universal quantification, and the like. So far, we have
mostly treated these mechanisms as if they were quite separate, and for many
purposes this is a good way to think. But we have also seen hints that Idris's
programming and proving facilities are closely related. For example, the keyword
\idr{data} is used to declare both data types and propositions, and \idr{->} is
used both to describe the type of functions on data and logical implication.
This is not just a syntactic accident! In fact, programs and proofs in Idris are
almost the same thing. In this chapter we will study how this works.
We have already seen the fundamental idea: provability in Idris is represented
by concrete _evidence_. When we construct the proof of a basic proposition, we
are actually building a tree of evidence, which can be thought of as a data
structure.
If the proposition is an implication like \idr{A -> B}, then its proof will be
an evidence _transformer_: a recipe for converting evidence for \idr{A} into
evidence for \idr{B}. So at a fundamental level, proofs are simply programs that
manipulate evidence.
Question: If evidence is data, what are propositions themselves?
Answer: They are types!
Look again at the formal definition of the \idr{Ev} property.
```idris
data Ev : Nat -> Type where
Ev_0 : Ev Z
Ev_SS : {n : Nat} -> Ev n -> Ev (S (S n))
```
Suppose we introduce an alternative pronunciation of "\idr{:}". Instead of "has
type," we can say "is a proof of." For example, the second line in the
definition of \idr{Ev} declares that \idr{Ev_0 : Ev 0}. Instead of "\idr{Ev_0}
has type \idr{Ev 0}," we can say that "\idr{Ev_0} is a proof of \idr{Ev 0}."
This pun between types and propositions — between \idr{:} as "has type" and
\idr{:} as "is a proof of" or "is evidence for" — is called the Curry-Howard
correspondence. It proposes a deep connection between the world of logic and the
world of computation:
propositions ~ types
proofs ~ data values
\todo[inline]{Add http://dl.acm.org/citation.cfm?id=2699407 as a link}
See [Wadler 2015] for a brief history and an up-to-date exposition.
Many useful insights follow from this connection. To begin with, it gives us a
natural interpretation of the type of the \idr{Ev_SS} constructor:
```idris
λΠ> :t Ev_SS
Ev_SS : Ev n -> Ev (S (S n))
```
This can be read "\idr{Ev_SS} is a constructor that takes two arguments — a
number \idr{n} and evidence for the proposition \idr{Ev n} — and yields evidence
for the proposition \idr{Ev (S (S n))}."
Now let's look again at a previous proof involving \idr{Ev}.
> ev_4 : Ev 4
> ev_4 = Ev_SS {n=2} $ Ev_SS {n=0} Ev_0
As with ordinary data values and functions, we can use the \idr{:printdef}
command to see the proof object that results from this proof script.
```idris
λΠ> :printdef ev_4
ev_4 : Ev 4
ev_4 = Ev_SS (Ev_SS Ev_0)
```
As a matter of fact, we can also write down this proof object directly, without
the need for a separate proof script:
```idris
λΠ> Ev_SS $ Ev_SS Ev_0
Ev_SS (Ev_SS Ev_0) : Ev 4
```
The expression \idr{Ev_SS {n=2} $ Ev_SS {n=0} Ev_0} can be thought of as
instantiating the parameterized constructor \idr{Ev_SS} with the specific
arguments \idr{2} and \idr{0} plus the corresponding proof objects for its
premises \idr{Ev 2} and \idr{Ev 0}. Alternatively, we can think of \idr{Ev_SS}
as a primitive "evidence constructor" that, when applied to a particular number,
wants to be further applied to evidence that that number is even; its type,
```idris
{n : Nat} -> Ev n -> Ev (S (S n))
```
expresses this functionality, in the same way that the polymorphic type
\idr{{x : Type} -> List x} expresses the fact that the constructor \idr{Nil} can
be thought of as a function from types to empty lists with elements of that
type.
\todo[inline]{Edit or remove}
We saw in the `Logic` chapter that we can use function application syntax to
instantiate universally quantified variables in lemmas, as well as to supply
evidence for assumptions that these lemmas impose. For instance:
```coq
Theorem ev_4': ev 4.
Proof.
apply (ev_SS 2 (ev_SS 0 ev_0)).
Qed.
```
We can now see that this feature is a trivial consequence of the status the
Idris grants to proofs and propositions: Lemmas and hypotheses can be combined
in expressions (i.e., proof objects) according to the same basic rules used for
programs in the language.
== Proof Scripts
\ \todo[inline]{Rewrite, keep explanation about holes? Seems a bit late for
that}
The _proof objects_ we've been discussing lie at the core of how Idris operates.
When Idris is following a proof script, what is happening internally is that it
is gradually constructing a proof object — a term whose type is the proposition
being proved. The expression on the right hand side of \idr{=} tell it how to
build up a term of the required type. To see this process in action, let's use
the `Show Proof` command to display the current state of the proof tree at
various points in the following tactic proof.
```coq
Theorem ev_4'' : ev 4.
Proof.
Show Proof.
apply ev_SS.
Show Proof.
apply ev_SS.
Show Proof.
apply ev_0.
Show Proof.
Qed.
```
At any given moment, Idris has constructed a term with a "hole" (indicated by
`?Goal` here, and so on), and it knows what type of evidence is needed to fill
this hole.
Each hole corresponds to a subgoal, and the proof is finished when there are no
more subgoals. At this point, the evidence we've built stored in the global
context under the name given in the type definition.
Tactic proofs are useful and convenient, but they are not essential: in
principle, we can always construct the required evidence by hand, as shown
above. Then we can use `Definition` (rather than `Theorem`) to give a global
name directly to a piece of evidence.
```coq
Definition ev_4''' : ev 4 :=
ev_SS 2 (ev_SS 0 ev_0).
```
All these different ways of building the proof lead to exactly the same evidence
being saved in the global environment.
```coq
Print ev_4.
(* ===> ev_4 = ev_SS 2 (ev_SS 0 ev_0) : ev 4 *)
Print ev_4'.
(* ===> ev_4' = ev_SS 2 (ev_SS 0 ev_0) : ev 4 *)
Print ev_4''.
(* ===> ev_4'' = ev_SS 2 (ev_SS 0 ev_0) : ev 4 *)
Print ev_4'''.
(* ===> ev_4''' = ev_SS 2 (ev_SS 0 ev_0) : ev 4 *)
```
==== Exercise: 1 star (eight_is_even)
\ \todo[inline]{Remove?}
Give a tactic proof and a proof object showing that \idr{Ev 8}.
> ev_8 : Ev 8
> ev_8 = ?ev_8_rhs
$\square$
==== Quantifiers, Implications, Functions
\ \todo[inline]{Edit the section}
In Idris's computational universe (where data structures and programs live),
there are two sorts of values with arrows in their types: _constructors_
introduced by \idr{data} definitions, and _functions_.
Similarly, in Idris's logical universe (where we carry out proofs), there are
two ways of giving evidence for an implication: constructors introduced by
\idr{data}-defined propositions, and... functions!
For example, consider this statement:
> ev_plus4 : Ev n -> Ev (4 + n)
> ev_plus4 x = Ev_SS $ Ev_SS x
What is the proof object corresponding to `ev_plus4`?
We're looking for an expression whose type is
\idr{{n: Nat} -> Ev n -> Ev (4 + n)} — that is, a function that takes two
arguments (one number and a piece of evidence) and returns a piece of evidence!
Here it is:
```coq
Definition ev_plus4' : forall n, ev n -> ev (4 + n) :=
fun (n : Nat) => fun (H : ev n) =>
ev_SS (S (S n)) (ev_SS n H).
```
Recall that \idr{\n => blah} means "the function that, given \idr{n}, yields
\idr{blah}," and that Idris treats \idr{4 + n} and \idr{S (S (S (S n)))} as
synonyms. Another equivalent way to write this definition is:
```coq
Definition ev_plus4'' (n : Nat) (H : ev n) : ev (4 + n) :=
ev_SS (S (S n)) (ev_SS n H).
Check ev_plus4''.
(* ===> ev_plus4'' : forall n : Nat, ev n -> ev (4 + n) *)
```
When we view the proposition being proved by \idr{ev_plus4} as a function type,
one aspect of it may seem a little unusual. The second argument's type,
\idr{Ev n}, mentions the _value_ of the first argument, \idr{n}. While such
_dependent types_ are not found in conventional programming languages, they can
be useful in programming too, as the recent flurry of activity in the functional
programming community demonstrates.
\todo[inline]{Reword?}
Notice that both implication (\idr{->}) and quantification
(\idr{(x : t) -> f x}) correspond to functions on evidence. In fact, they are
really the same thing: \idr{->} is just a shorthand for a degenerate use of
quantification where there is no dependency, i.e., no need to give a name to the
type on the left-hand side of the arrow.
For example, consider this proposition:
> ev_plus2 : Type
> ev_plus2 = (n : Nat) -> (e : Ev n) -> Ev (n + 2)
A proof term inhabiting this proposition would be a function with two arguments:
a number \idr{n} and some evidence \idr{e} that \idr{n} is even. But the name
\idr{e} for this evidence is not used in the rest of the statement of
\idr{ev_plus2}, so it's a bit silly to bother making up a name for it. We could
write it like this instead:
> ev_plus2' : Type
> ev_plus2' = (n : Nat) -> Ev n -> Ev (n + 2)
In general, "\idr{p -> q}" is just syntactic sugar for "\idr{(_ : p) -> q}".
== Programming with Tactics
\ \todo[inline]{Edit and move to an appendix about ElabReflection/Pruviloj?}
If we can build proofs by giving explicit terms rather than executing tactic
scripts, you may be wondering whether we can build _programs_ using _tactics_
rather than explicit terms. Naturally, the answer is yes!
```coq
Definition add1 : Nat -> Nat.
intro n.
Show Proof.
apply S.
Show Proof.
apply n. Defined.
Print add1.
(* ==>
add1 = fun n : Nat => S n
: Nat -> Nat
*)
Compute add1 2.
(* ==> 3 : Nat *)
```
Notice that we terminate the `Definition` with a `.` rather than with `:=`
followed by a term. This tells Idris to enter _proof scripting mode_ to build an
object of type \idr{Nat -> Nat}. Also, we terminate the proof with `Defined`
rather than `Qed`; this makes the definition _transparent_ so that it can be
used in computation like a normally-defined function. (`Qed`-defined objects are
opaque during computation.)
This feature is mainly useful for writing functions with dependent types, which
we won't explore much further in this book. But it does illustrate the
uniformity and orthogonality of the basic ideas in Idris.
== Logical Connectives as Inductive Types
Inductive definitions are powerful enough to express most of the connectives and
quantifiers we have seen so far. Indeed, only universal quantification (and thus
implication) is built into Idris; all the others are defined inductively. We'll
see these definitions in this section.
=== Conjunction
\ \todo[inline]{Edit}
To prove that \idr{(p,q)} holds, we must present evidence for both \idr{p} and
\idr{q}. Thus, it makes sense to define a proof object for \idr{(p,q)} as
consisting of a pair of two proofs: one for \idr{p} and another one for \idr{q}.
This leads to the following definition.
> data And : (p, q : Type) -> Type where
> Conj : p -> q -> And p q
Notice the similarity with the definition of the \idr{Prod} type, given in
chapter `Poly`; the only difference is that \idr{Prod} takes Type arguments,
whereas and takes Prop arguments.
```idris
data Prod : (x, y : Type) -> Type where
PPair : x -> y -> Prod x y
```
This should clarify why pattern matching can be used on a conjunctive
hypothesis. Case analysis allows us to consider all possible ways in which
\idr{(p,q)} was proved — here just one (the \idr{Conj} constructor). Similarly,
the `split` tactic actually works for any inductively defined proposition with
only one constructor. In particular, it works for \idr{And}:
> and_comm : (And p q) <-> (And q p)
> and_comm = (\(Conj x y) => Conj y x,
> \(Conj y x) => Conj x y)
This shows why the inductive definition of `and` can be manipulated by tactics
as we've been doing. We can also use it to build proofs directly, using
pattern-matching. For instance:
> and_comm'_aux : And p q -> And q p
> and_comm'_aux (Conj x y) = Conj y x
> and_comm' : (And p q) <-> (And q p)
> and_comm' {p} {q} = (and_comm'_aux {p} {q}, and_comm'_aux {p=q} {q=p})
==== Exercise: 2 stars, optional (conj_fact)
Construct a proof object demonstrating the following proposition.
> conj_fact : And p q -> And q r -> And p r
> conj_fact pq qr = ?conj_fact_rhs
$\square$
=== Disjunction
The inductive definition of disjunction uses two constructors, one for each side
of the disjunct:
> data Or : (p, q : Type) -> Type where
> IntroL : p -> Or p q
> IntroR : q -> Or p q
This declaration explains the behavior of pattern matching on a disjunctive
hypothesis, since the generated subgoals match the shape of the \idr{IntroL} and
\idr{IntroR} constructors.
Once again, we can also directly write proof objects for theorems involving
\idr{Or}, without resorting to tactics.
==== Exercise: 2 stars, optional (or_comm)
\ \todo[inline]{Edit}
Try to write down an explicit proof object for \idr{or_comm} (without using
`Print` to peek at the ones we already defined!).
> or_comm : Or p q -> Or q p
> or_comm pq = ?or_comm_rhs
$\square$
=== Existential Quantification
To give evidence for an existential quantifier, we package a witness \idr{x}
together with a proof that \idr{x} satisfies the property \idr{p}:
> data Ex : (p : a -> Type) -> Type where
> ExIntro : (x : a) -> p x -> Ex p
This may benefit from a little unpacking. The core definition is for a type
former \idr{Ex} that can be used to build propositions of the form \idr{Ex p},
where \idr{p} itself is a function from witness values in the type \idr{a} to
propositions. The \idr{ExIntro} constructor then offers a way of constructing
evidence for \idr{Ex p}, given a witness \idr{x} and a proof of \idr{p x}.
The more familiar form \idr{(x ** p x)} desugars to an expression involving
\idr{Ex}:
\todo[inline]{Edit}
```coq
Check ex (fun n => ev n).
(* ===> exists n : Nat, ev n
: Prop *)
```
Here's how to define an explicit proof object involving \idr{Ex}:
> some_nat_is_even : Ex (\n => Ev n)
> some_nat_is_even = ExIntro 4 (Ev_SS $ Ev_SS Ev_0)
==== Exercise: 2 stars, optional (ex_ev_Sn)
Complete the definition of the following proof object:
> ex_ev_Sn : Ex (\n => Ev (S n))
> ex_ev_Sn = ?ex_ev_Sn_rhs
$\square$
\subsection{\idr{Unit} and \idr{Void}}
The inductive definition of the \idr{Unit} proposition is simple:
```idris
data Unit : Type where
() : Unit
```
It has one constructor (so every proof of \idr{Unit} is the same, so being given
a proof of \idr{Unit} is not informative).
\idr{Void} is equally simple — indeed, so simple it may look syntactically wrong
at first glance!
\todo[inline]{Edit, this actually is wrong, stdlib uses \idr{runElab} to define
it}
```idris
data Void : Type where
```
That is, \idr{Void} is an inductive type with _no_ constructors — i.e., no way
to build evidence for it.
== Equality
\ \todo[inline]{Edit, it actually is built in}
Even Idris's equality relation is not built in. It has the following inductive
definition. (Actually, the definition in the standard library is a small variant
of this, which gives an induction principle that is slightly easier to use.)
> data PropEq : {t : Type} -> t -> t -> Type where
> EqRefl : PropEq x x
> syntax [x] "='" [y] = PropEq x y
The way to think about this definition is that, given a set \idr{t}, it defines
a _family_ of propositions "\idr{x} is equal to \idr{y}," indexed by pairs of
values (\idr{x} and \idr{y}) from \idr{t}. There is just one way of constructing
evidence for each member of this family: applying the constructor \idr{EqRefl}
to a type \idr{t} and a value \idr{x : t} yields evidence that \idr{x} is equal
to \idr{x}.
\todo[inline]{Edit}
We can use \idr{EqRefl} to construct evidence that, for example, \idr{2 = 2}.
Can we also use it to construct evidence that \idr{1 + 1 = 2}? Yes, we can.
Indeed, it is the very same piece of evidence! The reason is that Idris treats
as "the same" any two terms that are _convertible_ according to a simple set of
computation rules. These rules, which are similar to those used by `Compute`,
include evaluation of function application, inlining of definitions, and
simplification of `match`es.
> four : (2 + 2) =' (1 + 3)
> four = EqRefl
The \idr{Refl} that we have used to prove equalities up to now is essentially
just an application of an equality constructor.
\todo[inline]{Edit}
In tactic-based proofs of equality, the conversion rules are normally hidden in
uses of `simpl` (either explicit or implicit in other tactics such as
`reflexivity`). But you can see them directly at work in the following explicit
proof objects:
```coq
Definition four' : 2 + 2 = 1 + 3 :=
eq_refl 4.
```
> singleton : ([]++[x]) =' (x::[])
> singleton = EqRefl
> quiz6 : Ex (\x => (x + 3) =' 4)
> quiz6 = ExIntro 1 EqRefl
==== Exercise: 2 stars (equality__leibniz_equality)
The inductive definition of equality corresponds to _Leibniz equality_: what we
mean when we say "\idr{x} and \idr{y} are equal" is that every property \idr{p}
that is true of \idr{x} is also true of \idr{y}.
> equality__leibniz_equality : (x =' y) -> ((p : t -> Type) -> p x -> p y)
> equality__leibniz_equality eq p px = ?equality__leibniz_equality_rhs
$\square$
==== Exercise: 5 stars, optional (leibniz_equality__equality)
Show that, in fact, the inductive definition of equality is _equivalent_ to
Leibniz equality:
> leibniz_equality__equality : ((p : t -> Type) -> p x -> p y) -> (x =' y)
> leibniz_equality__equality pxy = ?leibniz_equality__equality_rhs
$\square$
=== Inversion, Again
\ \todo[inline]{Edit/remove}
We've seen `inversion` used with both equality hypotheses and hypotheses about
inductively defined propositions. Now that we've seen that these are actually
the same thing, we're in a position to take a closer look at how `inversion`
behaves.
In general, the `inversion` tactic...
- takes a hypothesis `H` whose type `P` is inductively defined, and
- for each constructor `C` in `P`'s definition,
- generates a new subgoal in which we assume `H` was built with `C`,
- adds the arguments (premises) of `C` to the context of the subgoal as
extra hypotheses,
- matches the conclusion (result type) of `C` against the current goal and
calculates a set of equalities that must hold in order for `C` to be
applicable,
- adds these equalities to the context (and, for convenience, rewrites them
in the goal), and
- if the equalities are not satisfiable (e.g., they involve things like
\idr{S n = Z}), immediately solves the subgoal.
_Example_: If we invert a hypothesis built with \idr{Or}, there are two
constructors, so two subgoals get generated. The conclusion (result type) of the
constructor (\idr{Or p q}) doesn't place any restrictions on the form of \idr{p}
or \idr{q}, so we don't get any extra equalities in the context of the subgoal.
_Example_: If we invert a hypothesis built with \idr{And}, there is only one
constructor, so only one subgoal gets generated. Again, the conclusion (result
type) of the constructor (\idr{And p q}) doesn't place any restrictions on the
form of \idr{p} or \idr{q}, so we don't get any extra equalities in the context
of the subgoal. The constructor does have two arguments, though, and these can
be seen in the context in the subgoal.
_Example_: If we invert a hypothesis built with \idr{PropEq}, there is again
only one constructor, so only one subgoal gets generated. Now, though, the form
of the \idr{EqRefl} constructor does give us some extra information: it tells us
that the two arguments to \idr{PropEq} must be the same! The `inversion` tactic
adds this fact to the context.
| Idris | 5 | diseraluca/software-foundations | src/ProofObjects.lidr | [
"MIT"
] |
# frozen_string_literal: true
module ActionText
class Content
include Rendering, Serialization
attr_reader :fragment
delegate :blank?, :empty?, :html_safe, :present?, to: :to_html # Delegating to to_html to avoid including the layout
class << self
def fragment_by_canonicalizing_content(content)
fragment = ActionText::Attachment.fragment_by_canonicalizing_attachments(content)
fragment = ActionText::AttachmentGallery.fragment_by_canonicalizing_attachment_galleries(fragment)
fragment
end
end
def initialize(content = nil, options = {})
options.with_defaults! canonicalize: true
if options[:canonicalize]
@fragment = self.class.fragment_by_canonicalizing_content(content)
else
@fragment = ActionText::Fragment.wrap(content)
end
end
def links
@links ||= fragment.find_all("a[href]").map { |a| a["href"] }.uniq
end
def attachments
@attachments ||= attachment_nodes.map do |node|
attachment_for_node(node)
end
end
def attachment_galleries
@attachment_galleries ||= attachment_gallery_nodes.map do |node|
attachment_gallery_for_node(node)
end
end
def gallery_attachments
@gallery_attachments ||= attachment_galleries.flat_map(&:attachments)
end
def attachables
@attachables ||= attachment_nodes.map do |node|
ActionText::Attachable.from_node(node)
end
end
def append_attachables(attachables)
attachments = ActionText::Attachment.from_attachables(attachables)
self.class.new([self.to_s.presence, *attachments].compact.join("\n"))
end
def render_attachments(**options, &block)
content = fragment.replace(ActionText::Attachment.tag_name) do |node|
block.call(attachment_for_node(node, **options))
end
self.class.new(content, canonicalize: false)
end
def render_attachment_galleries(&block)
content = ActionText::AttachmentGallery.fragment_by_replacing_attachment_gallery_nodes(fragment) do |node|
block.call(attachment_gallery_for_node(node))
end
self.class.new(content, canonicalize: false)
end
def to_plain_text
render_attachments(with_full_attributes: false, &:to_plain_text).fragment.to_plain_text
end
def to_trix_html
render_attachments(&:to_trix_attachment).to_html
end
def to_html
fragment.to_html
end
def to_rendered_html_with_layout
render layout: "action_text/contents/content", partial: to_partial_path, formats: :html, locals: { content: self }
end
def to_partial_path
"action_text/contents/content"
end
def to_s
to_rendered_html_with_layout
end
def as_json(*)
to_html
end
def inspect
"#<#{self.class.name} #{to_s.truncate(25).inspect}>"
end
def ==(other)
if other.is_a?(self.class)
to_s == other.to_s
end
end
private
def attachment_nodes
@attachment_nodes ||= fragment.find_all(ActionText::Attachment.tag_name)
end
def attachment_gallery_nodes
@attachment_gallery_nodes ||= ActionText::AttachmentGallery.find_attachment_gallery_nodes(fragment)
end
def attachment_for_node(node, with_full_attributes: true)
attachment = ActionText::Attachment.from_node(node)
with_full_attributes ? attachment.with_full_attributes : attachment
end
# Wraps +node+ in an AttachmentGallery value object.
def attachment_gallery_for_node(node)
  ActionText::AttachmentGallery.from_node(node)
end
end
end
ActiveSupport.run_load_hooks :action_text_content, ActionText::Content
| Ruby | 4 | jstncarvalho/rails | actiontext/lib/action_text/content.rb | [
"MIT"
] |
/*
Copyright © 2011 MLstate
This file is part of Opa.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/**
*
* @author Hugo Venturini, Hugo Heuzard
* @creation 12/2010
*
* @category internet, network
* @destination public
*
*/
/**
* {1 About this module}
*
* This module is an IPv4 tool-box.
*
* {1 Where should I start?}
*
* {1 What if I need more?}
*/
/**
* {1 Types defined in this module}
*/
/**
* The type of an IP address or a Netmask.
*/
@opacapi
type ip = IPv4.ip
type IPv4.ip = { a : int ; b : int ; c : int ; d : int }
/**
* {1 Interface}
*/
IPv4 =
{{
  /**
   * {2 Parser }
   */
  // Grammar accepting plain dotted-quad IPv4 ("a.b.c.d"), the loopback "::1",
  // and the hybrid IPv6-mapped form "::ffff:a.b.c.d" (the prefix is optional,
  // via the '?' combinator, so a bare dotted quad also matches).
  ip_parser : Parser.general_parser(IPv4.ip) =
    parser
    | "::1" -> {a=127 b=0 c=0 d=1} // Loopback address (in hybrid IP notation).
    | "::ffff:" ? s1=Rule.byte "." s2=Rule.byte "." s3=Rule.byte "." s4=Rule.byte -> {a=s1 b=s2 c=s3 d=s4}
  /**
   * {2 Conversion functions }
   */
  // Renders an address as "a.b.c.d".
  @stringifier(IPv4.ip)
  string_of_ip(ip : IPv4.ip) : string = "{ip.a}.{ip.b}.{ip.c}.{ip.d}"
  // Strict parse; behavior on malformed input follows Parser.parse
  // (presumably a runtime failure — TODO confirm; prefer ip_of_string_opt).
  ip_of_string(ip : string) : IPv4.ip =
    Parser.parse(ip_parser, ip)
  // Safe parse: returns {none} when the string is not a valid address.
  ip_of_string_opt(ip : string) : option(IPv4.ip) =
    Parser.try_parse(ip_parser, ip)
}}
| Opa | 5 | Machiaweliczny/oppailang | lib/stdlib/core/web/core/ip.opa | [
"MIT"
] |
; Template: search the rectangle (x1,y1)-(x2,y2) for image_path, storing the
; match position in xpos/ypos; each option is emitted with a leading "*".
CoordMode, Pixel, {{ coord_mode }}
ImageSearch,xpos,ypos,{{ x1 }},{{ y1 }},{{ x2 }},{{ y2 }},{% if options %}{% for option in options %}*{{ option }} {% endfor %}{% endif %}{{ image_path }}
| AutoHotkey | 3 | scslmd/ahk | ahk/templates/daemon/image_search.ahk | [
"MIT"
] |
--TEST--
Bug #35411 (Regression with \{$ handling)
--FILE--
<?php
// In a double-quoted string (and heredoc), "\{" is NOT an escape sequence:
// the backslash is kept literally while {$abc} still interpolates — see the
// EXPECT section ("foo\{bar}baz"). An unescaped {$abc} interpolates with no
// braces in the output ("foobarbaz").
$abc = "bar";
echo "foo\{$abc}baz\n";
echo "foo\{ $abc}baz\n";
echo <<<TEST
foo{$abc}baz
foo\{$abc}baz
foo\{ $abc}baz
TEST;
?>
--EXPECT--
foo\{bar}baz
foo\{ bar}baz
foobarbaz
foo\{bar}baz
foo\{ bar}baz
"PHP-3.01"
] |
{# Read-only accordion listing each translation of the catalog promotion:
   one section per locale, showing its flag, label and description. #}
<div class="ui hidden divider"></div>
<div class="ui styled fluid accordion">
    {% for translation in catalog_promotion.translations %}
        <div class="title">
            <i class="dropdown icon"></i>
            {# Flag CSS class is derived from the last two letters of the locale code. #}
            <i class="{{ translation.locale|slice(-2)|lower }} flag"></i>
            {{ translation.locale|sylius_locale_name }}
        </div>
        <div class="ui content">
            <table class="ui very basic celled table">
                <tbody>
                    <tr>
                        <td class="three wide"><strong class="gray text">{{ 'sylius.ui.label'|trans }}</strong></td>
                        <td>{{ translation.label }}</td>
                    </tr>
                    <tr>
                        <td class="three wide"><strong class="gray text">{{ 'sylius.ui.description'|trans }}</strong></td>
                        <td>{{ translation.description|nl2br }}</td>
                    </tr>
                </tbody>
            </table>
        </div>
    {% endfor %}
</div>
| Twig | 4 | c2cn/Sylius | src/Sylius/Bundle/AdminBundle/Resources/views/CatalogPromotion/Show/_translations.html.twig | [
"MIT"
] |
%%
%% Licensed to the Apache Software Foundation (ASF) under one
%% or more contributor license agreements. See the NOTICE file
%% distributed with this work for additional information
%% regarding copyright ownership. The ASF licenses this file
%% to you under the Apache License, Version 2.0 (the
%% "License"); you may not use this file except in compliance
%% with the License. You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
% don't rename this thrift_test, it clobbers generated files
-module(thrift_test_test).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
-include("gen-erl/thrift_test_constants.hrl").
%% The IDL constant `myNumberz = 1` must survive code generation into
%% ?THRIFT_TEST_MYNUMBERZ (defined in gen-erl/thrift_test_constants.hrl).
constant_test_() ->
  [
    {"myNumberz equals 1", ?_assertEqual(1, ?THRIFT_TEST_MYNUMBERZ)}
  ].
record_generation_test_() ->
[
{"Bonk record", ?_assertMatch(
{'thrift.test.Bonk', _, _},
#'thrift.test.Bonk'{message=null,type=null}
)},
{"Bools record", ?_assertMatch(
{'thrift.test.Bools', _, _},
#'thrift.test.Bools'{im_true=null,im_false=null}
)},
{"Xtruct record", ?_assertMatch(
{'thrift.test.Xtruct', _, _, _, _},
#'thrift.test.Xtruct'{string_thing=null,byte_thing=null,i32_thing=null,i64_thing=null}
)},
{"Xtruct2 record", ?_assertMatch(
{'thrift.test.Xtruct2', _, _, _},
#'thrift.test.Xtruct2'{byte_thing=null,struct_thing=null,i32_thing=null}
)},
{"Xtruct3 record", ?_assertMatch(
{'thrift.test.Xtruct3', _, _, _, _},
#'thrift.test.Xtruct3'{string_thing=null,changed=null,i32_thing=null,i64_thing=null}
)},
{"Insanity record", ?_assertMatch(
{'thrift.test.Insanity', _, _},
#'thrift.test.Insanity'{userMap=null,xtructs=null}
)},
{"CrazyNesting record", ?_assertMatch(
{'thrift.test.CrazyNesting', _, _, _, _},
#'thrift.test.CrazyNesting'{
string_field=null,
set_field=null,
list_field=null,
binary_field=null
}
)},
{"Xception record", ?_assertMatch(
{'thrift.test.Xception', _, _},
#'thrift.test.Xception'{errorCode=null,message=null}
)},
{"Xception2 record", ?_assertMatch(
{'thrift.test.Xception2', _, _},
#'thrift.test.Xception2'{errorCode=null,struct_thing=null}
)},
{"EmptyStruct record", ?_assertMatch({'thrift.test.EmptyStruct'}, #'thrift.test.EmptyStruct'{})},
{"OneField record", ?_assertMatch({'thrift.test.OneField', _}, #'thrift.test.OneField'{field=null})},
{"VersioningTestV1 record", ?_assertMatch(
{'thrift.test.VersioningTestV1', _, _, _},
#'thrift.test.VersioningTestV1'{begin_in_both=null,old_string=null,end_in_both=null}
)},
{"VersioningTestV2 record", ?_assertMatch(
{'thrift.test.VersioningTestV2', _, _, _, _, _, _, _, _, _, _, _, _},
#'thrift.test.VersioningTestV2'{
begin_in_both=null,
newint=null,
newbyte=null,
newshort=null,
newlong=null,
newdouble=null,
newstruct=null,
newlist=null,
newset=null,
newmap=null,
newstring=null,
end_in_both=null
}
)},
{"ListTypeVersioningV1 record", ?_assertMatch(
{'thrift.test.ListTypeVersioningV1', _, _},
#'thrift.test.ListTypeVersioningV1'{myints=null,hello=null}
)},
{"ListTypeVersioningV2 record", ?_assertMatch(
{'thrift.test.ListTypeVersioningV2', _, _},
#'thrift.test.ListTypeVersioningV2'{strings=null,hello=null}
)},
{"GuessProtocolStruct record", ?_assertMatch(
{'thrift.test.GuessProtocolStruct', _},
#'thrift.test.GuessProtocolStruct'{map_field=null}
)},
{"LargeDeltas record", ?_assertMatch(
{'thrift.test.LargeDeltas', _, _, _, _, _, _, _, _, _, _},
#'thrift.test.LargeDeltas'{
b1=null,
b10=null,
b100=null,
check_true=null,
b1000=null,
check_false=null,
vertwo2000=null,
a_set2500=null,
vertwo3000=null,
big_numbers=null
}
)},
{"NestedListsI32x2 record", ?_assertMatch(
{'thrift.test.NestedListsI32x2', _},
#'thrift.test.NestedListsI32x2'{integerlist=null}
)},
{"NestedListsI32x3 record", ?_assertMatch(
{'thrift.test.NestedListsI32x3', _},
#'thrift.test.NestedListsI32x3'{integerlist=null}
)},
{"NestedMixedx2 record", ?_assertMatch(
{'thrift.test.NestedMixedx2', _, _, _},
#'thrift.test.NestedMixedx2'{
int_set_list=null,
map_int_strset=null,
map_int_strset_list=null
}
)},
{"ListBonks record", ?_assertMatch({'thrift.test.ListBonks', _}, #'thrift.test.ListBonks'{bonk=null})},
{"NestedListsBonk record", ?_assertMatch(
{'thrift.test.NestedListsBonk', _},
#'thrift.test.NestedListsBonk'{bonk=null}
)},
{"BoolTest record", ?_assertMatch(
{'thrift.test.BoolTest', _, _},
#'thrift.test.BoolTest'{b=null,s=null}
)},
{"StructA record", ?_assertMatch({'thrift.test.StructA', _}, #'thrift.test.StructA'{s=null})},
{"StructB record", ?_assertMatch(
{'thrift.test.StructB', _, _},
#'thrift.test.StructB'{aa=null,ab=null}
)}
].
struct_info_test_() ->
[
{"Bonk definition (short version)", ?_assertEqual(
{struct, [{1, string}, {2, i32}]},
thrift_test_types:struct_info('thrift.test.Bonk')
)},
{"Bonk definition", ?_assertEqual(
{struct, [
{1, undefined, string, message, undefined},
{2, undefined, i32, type, undefined}
]},
thrift_test_types:struct_info_ext('thrift.test.Bonk')
)},
{"Bools definition", ?_assertEqual(
{struct, [
{1, undefined, bool, im_true, undefined},
{2, undefined, bool, im_false, undefined}
]},
thrift_test_types:struct_info_ext('thrift.test.Bools')
)},
{"Xtruct definition", ?_assertEqual(
{struct, [
{1, undefined, string, string_thing, undefined},
{4, undefined, byte, byte_thing, undefined},
{9, undefined, i32, i32_thing, undefined},
{11, undefined, i64, i64_thing, undefined}
]},
thrift_test_types:struct_info_ext('thrift.test.Xtruct')
)},
{"Xtruct2 definition", ?_assertEqual(
{struct, [
{1, undefined, byte, byte_thing, undefined},
{2, undefined, {struct, {'thrift_test_types', 'thrift.test.Xtruct'}}, struct_thing, #'thrift.test.Xtruct'{}},
{3, undefined, i32, i32_thing, undefined}
]},
thrift_test_types:struct_info_ext('thrift.test.Xtruct2')
)},
{"Xtruct3 definition", ?_assertEqual(
{struct, [
{1, undefined, string, string_thing, undefined},
{4, undefined, i32, changed, undefined},
{9, undefined, i32, i32_thing, undefined},
{11, undefined, i64, i64_thing, undefined}
]},
thrift_test_types:struct_info_ext('thrift.test.Xtruct3')
)},
{"Insanity definition", ?_assertEqual(
{struct, [
{1, undefined, {map, i32, i64}, userMap, dict:new()},
{2, undefined, {list, {struct, {'thrift_test_types', 'thrift.test.Xtruct'}}}, xtructs, []}
]},
thrift_test_types:struct_info_ext('thrift.test.Insanity')
)},
{"CrazyNesting definition", ?_assertEqual(
{struct, [
{1, undefined, string, string_field, undefined},
{2, optional, {set, {struct, {'thrift_test_types', 'thrift.test.Insanity'}}}, set_field, sets:new()},
{3, required, {list, {map,
{set, i32},
{map, i32, {set, {list, {map, {struct, {'thrift_test_types', 'thrift.test.Insanity'}}, string}}}}
}}, list_field, []},
{4, undefined, string, binary_field, undefined}
]},
thrift_test_types:struct_info_ext('thrift.test.CrazyNesting')
)},
{"Xception definition", ?_assertEqual(
{struct, [
{1, undefined, i32, errorCode, undefined},
{2, undefined, string, message, undefined}
]},
thrift_test_types:struct_info_ext('thrift.test.Xception')
)},
{"Xception2 definition", ?_assertEqual(
{struct, [
{1, undefined, i32, errorCode, undefined},
{2, undefined, {struct, {'thrift_test_types', 'thrift.test.Xtruct'}}, struct_thing, #'thrift.test.Xtruct'{}}
]},
thrift_test_types:struct_info_ext('thrift.test.Xception2')
)},
{"EmptyStruct definition", ?_assertEqual(
{struct, []},
thrift_test_types:struct_info_ext('thrift.test.EmptyStruct')
)},
{"OneField definition", ?_assertEqual(
{struct, [
{1, undefined, {struct, {'thrift_test_types', 'thrift.test.EmptyStruct'}}, field, #'thrift.test.EmptyStruct'{}}
]},
thrift_test_types:struct_info_ext('thrift.test.OneField')
)},
{"VersioningTestV1 definition", ?_assertEqual(
{struct, [
{1, undefined, i32, begin_in_both, undefined},
{3, undefined, string, old_string, undefined},
{12, undefined, i32, end_in_both, undefined}
]},
thrift_test_types:struct_info_ext('thrift.test.VersioningTestV1')
)},
{"VersioningTestV2 definition", ?_assertEqual(
{struct, [
{1, undefined, i32, begin_in_both, undefined},
{2, undefined, i32, newint, undefined},
{3, undefined, byte, newbyte, undefined},
{4, undefined, i16, newshort, undefined},
{5, undefined, i64, newlong, undefined},
{6, undefined, double, newdouble, undefined},
{7, undefined, {struct, {thrift_test_types, 'thrift.test.Bonk'}}, newstruct, #'thrift.test.Bonk'{}},
{8, undefined, {list, i32}, newlist, []},
{9, undefined, {set, i32}, newset, sets:new()},
{10, undefined, {map, i32, i32}, newmap, dict:new()},
{11, undefined, string, newstring, undefined},
{12, undefined, i32, end_in_both, undefined}
]},
thrift_test_types:struct_info_ext('thrift.test.VersioningTestV2')
)},
{"ListTypeVersioningV1 definition", ?_assertEqual(
{struct, [
{1, undefined, {list, i32}, myints, []},
{2, undefined, string, hello, undefined}
]},
thrift_test_types:struct_info_ext('thrift.test.ListTypeVersioningV1')
)},
{"ListTypeVersioningV2 definition", ?_assertEqual(
{struct, [
{1, undefined, {list, string}, strings, []},
{2, undefined, string, hello, undefined}
]},
thrift_test_types:struct_info_ext('thrift.test.ListTypeVersioningV2')
)},
{"GuessProtocolStruct definition", ?_assertEqual(
{struct, [
{7, undefined, {map, string, string}, map_field, dict:new()}
]},
thrift_test_types:struct_info_ext('thrift.test.GuessProtocolStruct')
)},
{"LargeDeltas definition", ?_assertEqual(
{struct, [
{1, undefined, {struct, {thrift_test_types, 'thrift.test.Bools'}}, b1, #'thrift.test.Bools'{}},
{10, undefined, {struct, {thrift_test_types, 'thrift.test.Bools'}}, b10, #'thrift.test.Bools'{}},
{100, undefined, {struct, {thrift_test_types, 'thrift.test.Bools'}}, b100, #'thrift.test.Bools'{}},
{500, undefined, bool, check_true, undefined},
{1000, undefined, {struct, {thrift_test_types, 'thrift.test.Bools'}}, b1000, #'thrift.test.Bools'{}},
{1500, undefined, bool, check_false, undefined},
{2000, undefined, {struct, {thrift_test_types, 'thrift.test.VersioningTestV2'}}, vertwo2000, #'thrift.test.VersioningTestV2'{}},
{2500, undefined, {set, string}, a_set2500, sets:new()},
{3000, undefined, {struct, {thrift_test_types, 'thrift.test.VersioningTestV2'}}, vertwo3000, #'thrift.test.VersioningTestV2'{}},
{4000, undefined, {list, i32}, big_numbers, []}
]},
thrift_test_types:struct_info_ext('thrift.test.LargeDeltas')
)},
{"NestedListsI32x2 definition", ?_assertEqual(
{struct, [
{1, undefined, {list, {list, i32}}, integerlist, []}
]},
thrift_test_types:struct_info_ext('thrift.test.NestedListsI32x2')
)},
{"NestedListsI32x3 definition", ?_assertEqual(
{struct, [
{1, undefined, {list, {list, {list, i32}}}, integerlist, []}
]},
thrift_test_types:struct_info_ext('thrift.test.NestedListsI32x3')
)},
{"NestedMixedx2 definition", ?_assertEqual(
{struct, [
{1, undefined, {list, {set, i32}}, int_set_list, []},
{2, undefined, {map, i32, {set, string}}, map_int_strset, dict:new()},
{3, undefined, {list, {map, i32, {set, string}}}, map_int_strset_list, []}
]},
thrift_test_types:struct_info_ext('thrift.test.NestedMixedx2')
)},
{"ListBonks definition", ?_assertEqual(
{struct, [
{1, undefined, {list, {struct, {thrift_test_types, 'thrift.test.Bonk'}}}, bonk, []}
]},
thrift_test_types:struct_info_ext('thrift.test.ListBonks')
)},
{"NestedListsBonk definition", ?_assertEqual(
{struct, [
{1, undefined, {list, {list, {list, {struct, {thrift_test_types, 'thrift.test.Bonk'}}}}}, bonk, []}
]},
thrift_test_types:struct_info_ext('thrift.test.NestedListsBonk')
)},
{"BoolTest definition", ?_assertEqual(
{struct, [
{1, optional, bool, b, true},
{2, optional, string, s, "true"}
]},
thrift_test_types:struct_info_ext('thrift.test.BoolTest')
)},
{"StructA definition", ?_assertEqual(
{struct, [{1, required, string, s, undefined}]},
thrift_test_types:struct_info_ext('thrift.test.StructA')
)},
{"StructB definition", ?_assertEqual(
{struct, [
{1, optional, {struct, {thrift_test_types, 'thrift.test.StructA'}}, aa, #'thrift.test.StructA'{}},
{2, required, {struct, {thrift_test_types, 'thrift.test.StructA'}}, ab, #'thrift.test.StructA'{}}
]},
thrift_test_types:struct_info_ext('thrift.test.StructB')
)}
].
service_info_test_() ->
[
{"testVoid params", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testVoid, params_type)
)},
{"testVoid reply", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testVoid, reply_type)
)},
{"testVoid exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testVoid, exceptions)
)},
{"testString params", ?_assertEqual(
{struct, [{1, string}]},
thrift_test_thrift:function_info(testString, params_type)
)},
{"testString reply", ?_assertEqual(
string,
thrift_test_thrift:function_info(testString, reply_type)
)},
{"testString exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testString, exceptions)
)},
{"testByte params", ?_assertEqual(
{struct, [{1, byte}]},
thrift_test_thrift:function_info(testByte, params_type)
)},
{"testByte reply", ?_assertEqual(
byte,
thrift_test_thrift:function_info(testByte, reply_type)
)},
{"testByte exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testByte, exceptions)
)},
{"testI32 params", ?_assertEqual(
{struct, [{1, i32}]},
thrift_test_thrift:function_info(testI32, params_type)
)},
{"testI32 reply", ?_assertEqual(
i32,
thrift_test_thrift:function_info(testI32, reply_type)
)},
{"testI32 exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testI32, exceptions)
)},
{"testI64 params", ?_assertEqual(
{struct, [{1, i64}]},
thrift_test_thrift:function_info(testI64, params_type)
)},
{"testI64 reply", ?_assertEqual(
i64,
thrift_test_thrift:function_info(testI64, reply_type)
)},
{"testI64 exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testI64, exceptions)
)},
{"testDouble params", ?_assertEqual(
{struct, [{1, double}]},
thrift_test_thrift:function_info(testDouble, params_type)
)},
{"testDouble reply", ?_assertEqual(
double,
thrift_test_thrift:function_info(testDouble, reply_type)
)},
{"testDouble exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testDouble, exceptions)
)},
{"testStruct params", ?_assertEqual(
{struct, [
{1, {struct, {thrift_test_types, 'thrift.test.Xtruct'}}}
]},
thrift_test_thrift:function_info(testStruct, params_type)
)},
{"testStruct reply", ?_assertEqual(
{struct, {thrift_test_types, 'thrift.test.Xtruct'}},
thrift_test_thrift:function_info(testStruct, reply_type)
)},
{"testStruct exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testStruct, exceptions)
)},
{"testNest params", ?_assertEqual(
{struct, [
{1, {struct, {thrift_test_types, 'thrift.test.Xtruct2'}}}
]},
thrift_test_thrift:function_info(testNest, params_type)
)},
{"testNest reply", ?_assertEqual(
{struct, {thrift_test_types, 'thrift.test.Xtruct2'}},
thrift_test_thrift:function_info(testNest, reply_type)
)},
{"testNest exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testNest, exceptions)
)},
{"testMap params", ?_assertEqual(
{struct, [
{1, {map, i32, i32}}
]},
thrift_test_thrift:function_info(testMap, params_type)
)},
{"testMap reply", ?_assertEqual(
{map, i32, i32},
thrift_test_thrift:function_info(testMap, reply_type)
)},
{"testMap exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testMap, exceptions)
)},
{"testStringMap params", ?_assertEqual(
{struct, [
{1, {map, string, string}}
]},
thrift_test_thrift:function_info(testStringMap, params_type)
)},
{"testStringMap reply", ?_assertEqual(
{map, string, string},
thrift_test_thrift:function_info(testStringMap, reply_type)
)},
{"testStringMap exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testStringMap, exceptions)
)},
{"testSet params", ?_assertEqual(
{struct, [
{1, {set, i32}}
]},
thrift_test_thrift:function_info(testSet, params_type)
)},
{"testSet reply", ?_assertEqual(
{set, i32},
thrift_test_thrift:function_info(testSet, reply_type)
)},
{"testSet exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testSet, exceptions)
)},
{"testList params", ?_assertEqual(
{struct, [
{1, {list, i32}}
]},
thrift_test_thrift:function_info(testList, params_type)
)},
{"testList reply", ?_assertEqual(
{list, i32},
thrift_test_thrift:function_info(testList, reply_type)
)},
{"testList exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testList, exceptions)
)},
{"testEnum params", ?_assertEqual(
{struct, [
{1, i32}
]},
thrift_test_thrift:function_info(testEnum, params_type)
)},
{"testEnum reply", ?_assertEqual(
i32,
thrift_test_thrift:function_info(testEnum, reply_type)
)},
{"testEnum exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testEnum, exceptions)
)},
{"testTypedef params", ?_assertEqual(
{struct, [{1, i64}]},
thrift_test_thrift:function_info(testTypedef, params_type)
)},
{"testTypedef reply", ?_assertEqual(
i64,
thrift_test_thrift:function_info(testTypedef, reply_type)
)},
{"testTypedef exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testTypedef, exceptions)
)},
{"testMapMap params", ?_assertEqual(
{struct, [
{1, i32}
]},
thrift_test_thrift:function_info(testMapMap, params_type)
)},
{"testMapMap reply", ?_assertEqual(
{map, i32, {map, i32,i32}},
thrift_test_thrift:function_info(testMapMap, reply_type)
)},
{"testMapMap exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testMapMap, exceptions)
)},
{"testInsanity params", ?_assertEqual(
{struct, [
{1, {struct, {thrift_test_types, 'thrift.test.Insanity'}}}
]},
thrift_test_thrift:function_info(testInsanity, params_type)
)},
{"testInsanity reply", ?_assertEqual(
{map, i64, {map, i32, {struct, {'thrift_test_types', 'thrift.test.Insanity'}}}},
thrift_test_thrift:function_info(testInsanity, reply_type)
)},
{"testInsanity exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testInsanity, exceptions)
)},
{"testMulti params", ?_assertEqual(
{struct, [
{1, byte},
{2, i32},
{3, i64},
{4, {map, i16, string}},
{5, i32},
{6, i64}
]},
thrift_test_thrift:function_info(testMulti, params_type)
)},
{"testMulti reply", ?_assertEqual(
{struct, {thrift_test_types, 'thrift.test.Xtruct'}},
thrift_test_thrift:function_info(testMulti, reply_type)
)},
{"testMulti exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testMulti, exceptions)
)},
{"testException params", ?_assertEqual(
{struct, [{1, string}]},
thrift_test_thrift:function_info(testException, params_type)
)},
{"testException reply", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testException, reply_type)
)},
{"testException exceptions", ?_assertEqual(
{struct, [
{1, {struct, {thrift_test_types, 'thrift.test.Xception'}}}
]},
thrift_test_thrift:function_info(testException, exceptions)
)},
{"testMultiException params", ?_assertEqual(
{struct, [{1, string}, {2, string}]},
thrift_test_thrift:function_info(testMultiException, params_type)
)},
{"testMultiException reply", ?_assertEqual(
{struct, {thrift_test_types, 'thrift.test.Xtruct'}},
thrift_test_thrift:function_info(testMultiException, reply_type)
)},
{"testMultiException exceptions", ?_assertEqual(
{struct, [
{1, {struct, {thrift_test_types, 'thrift.test.Xception'}}},
{2, {struct, {thrift_test_types, 'thrift.test.Xception2'}}}
]},
thrift_test_thrift:function_info(testMultiException, exceptions)
)},
{"testOneway params", ?_assertEqual(
{struct, [{1, i32}]},
thrift_test_thrift:function_info(testOneway, params_type)
)},
{"testOneway reply", ?_assertEqual(
oneway_void,
thrift_test_thrift:function_info(testOneway, reply_type)
)},
{"testOneway exceptions", ?_assertEqual(
{struct, []},
thrift_test_thrift:function_info(testOneway, exceptions)
)},
{"secondtestString params", ?_assertEqual(
{struct, [{1, string}]},
second_service_thrift:function_info(secondtestString, params_type)
)},
{"secondtestString reply", ?_assertEqual(
string,
second_service_thrift:function_info(secondtestString, reply_type)
)},
{"secondtestString exceptions", ?_assertEqual(
{struct, []},
second_service_thrift:function_info(secondtestString, exceptions)
)}
].
| Erlang | 5 | Jimexist/thrift | lib/erl/test/thrift_test_test.erl | [
"Apache-2.0"
] |
#!/bin/bash
# Install the Vulkan SDK into the Jenkins home directory for CI builds.
set -ex

# Fail fast if the caller did not specify which SDK version to install.
[ -n "${VULKAN_SDK_VERSION}" ]

# NOTE: the previous `retry()` helper was dead code — it was never invoked;
# curl's own `--retry 3` below already handles transient download failures.

_vulkansdk_dir=/var/lib/jenkins/vulkansdk
_tmp_vulkansdk_targz=/tmp/vulkansdk.tar.gz

# Download the prepackaged SDK tarball for the requested version.
curl \
  --silent \
  --show-error \
  --location \
  --fail \
  --retry 3 \
  --output "${_tmp_vulkansdk_targz}" "https://ossci-android.s3.amazonaws.com/vulkansdk-linux-x86_64-${VULKAN_SDK_VERSION}.tar.gz"

# Unpack into the install dir, dropping the archive's top-level directory,
# then clean up the temporary tarball.
mkdir -p "${_vulkansdk_dir}"
tar -C "${_vulkansdk_dir}" -xzf "${_tmp_vulkansdk_targz}" --strip-components 1
rm -rf "${_tmp_vulkansdk_targz}"
| Shell | 4 | Hacky-DH/pytorch | .circleci/docker/common/install_vulkan_sdk.sh | [
"Intel"
] |
proc tenTimes*(x: int): int =
  ## Returns ``x`` multiplied by ten.
  x * 10
| Nimrod | 4 | JohnAD/Nim | tests/dir with space/more spaces/mspace.nim | [
"MIT"
] |
// @b resolves to pure red (#f00).
@b: #f00;
| CartoCSS | 0 | nimix/carto | test/rendering/cross_stylesheet_variable_a.mss | [
"Apache-2.0"
] |
{% extends "base.ahk"%}
{% block body %}
; Template: show a tray tip for `second` seconds, then hide it. On Windows 10
; (detected via A_OSVersion below) TrayTip alone may not hide the tip, so the
; tray icon is toggled as a workaround.
TrayTip {{ title }}, {{ text }}, {{ second }}, {{ option }}
Sleep {{ second * 1000 }}
HideTrayTip()
; Copy this function into your script to use it.
HideTrayTip() {
    TrayTip ; Attempt to hide it the normal way.
    if SubStr(A_OSVersion,1,3) = "10." {
        Menu Tray, NoIcon
        Sleep 200 ; It may be necessary to adjust this sleep.
        Menu Tray, Icon
    }
}
{% endblock body %}
| AutoHotkey | 4 | scslmd/ahk | ahk/templates/gui/traytip.ahk | [
"MIT"
] |
-- Regression test: WITH TOTALS must pass a valid Arena to
-- IAggregateFunction::merge (details in the long comment below).
-- A tiny threshold forces two-level aggregation; max_threads = 1 keeps
-- the subquery order and the value of anyLast deterministic.
SET group_by_two_level_threshold = 1, max_threads = 1;

SELECT
    k,
    anyLast(s)
FROM
(
    -- UNION ALL yields two blocks before GROUP BY (see explanation below).
    SELECT
        123456789 AS k,
        'Hello 1234567890 1234567890 1234567890 1234567890 1234567890 1234567890' AS s
    UNION ALL
    SELECT
        234567890,
        'World 1234567890 1234567890 1234567890 1234567890 1234567890 1234567890'
)
GROUP BY k
    WITH TOTALS
-- Always-true HAVING routes TOTALS through TotalsHavingBlockInputStream,
-- which is where the bug triggered.
HAVING length(anyLast(s)) > 0
ORDER BY k;
/* There was a bug in implementation of WITH TOTALS.
* When there was more than one block after aggregation,
* nullptr is passed to IAggregateFunction::merge instead of pointer to valid Arena.
*
* To reproduce, we set 'group_by_two_level_threshold' to small value to enable two-level aggregation.
* Only in two-level aggregation there are many blocks after GROUP BY.
*
* Also use UNION ALL in subquery to generate two blocks before GROUP BY.
* Because two-level aggregation may be triggered only after a block is processed.
*
* Use large numbers as a key, because for 8, 16 bit numbers,
* two-level aggregation is not possible as simple aggregation method is used.
* These numbers are happy to hash to different buckets and we thus we have two blocks after GROUP BY.
*
* Also we use long strings (at least 64 bytes) in aggregation state,
* because aggregate functions min/max/any/anyLast use Arena only for long enough strings.
*
* And we use function 'anyLast' for method IAggregateFunction::merge to be called for every new value.
*
* We use useless HAVING (that is always true), because in absense of HAVING,
* TOTALS are calculated in a simple way in same pass during aggregation, not in TotalsHavingBlockInputStream,
* and bug doesn't trigger.
*
* We use ORDER BY for result of the test to be deterministic.
* max_threads = 1 for deterministic order of result in subquery and the value of 'anyLast'.
*/
| SQL | 4 | pdv-ru/ClickHouse | tests/queries/0_stateless/00558_aggregate_merge_totals_with_arenas.sql | [
"Apache-2.0"
] |
# Copyright (c) 2022 Fyde Innovations Limited and the openFyde Authors.
# Distributed under the license specified in the root directory of this project.
EAPI="5"

# The appid eclass supplies the doappid helper used in src_install below.
inherit appid

DESCRIPTION="Creates an app id for this build and update the lsb-release file"
HOMEPAGE="https://fydeos.io"
LICENSE="BSD"
SLOT="0"
KEYWORDS="*"
IUSE=""

RDEPEND=""
DEPEND="${RDEPEND}"

S="${WORKDIR}"

# Stamp this build with its update appid (GUID) and board type.
src_install() {
  doappid "{4D3D2356-0ABF-4994-B191-9A16A11AC0C6}" "CHROMEBOX"
}
| Gentoo Ebuild | 3 | FydeOS/chromium_os_for_raspberry_pi | overlay-rpi4/chromeos-base/device-appid/device-appid-0.0.1.ebuild | [
"BSD-2-Clause"
] |
sleep 4
t gpio 43 sw out0
sleep 4
t gpio 37 sw out0
sleep 10
t gpio 37 sw out1
sleep 1
deletedir d:\DCIM
reboot yes
| AGS Script | 0 | waltersgrey/autoexechack | deletemedia/deletephoto/HERO2/autoexec.ash | [
"MIT"
] |
# redo rule for "$2.ps": route all tool output to stderr, make sure the DVI
# prerequisite is up to date, then convert it to PostScript into redo's
# temporary output file "$3".
exec >&2
redo-ifchange "$2.dvi"
dvips -o "$3" "$2.dvi"
| Stata | 2 | BlameJohnny/redo | docs/cookbook/latex/default.ps.do | [
"Apache-2.0"
] |
// Runs when a client (player) connects; after the default New() behavior
// (invoked via ..()), send a greeting to that client.
/client/New()
	..()
	src << "Hello world!"
| DM | 0 | LaudateCorpus1/RosettaCodeData | Task/Hello-world-Text/DM/hello-world-text.dm | [
"Info-ZIP"
] |
<%@ page import="com.amazonaws.services.simpleworkflow.model.ActivityTypeInfo; com.netflix.asgard.EntityType" %>
<%--
Copyright 2012 Netflix, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--%>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="layout" content="main"/>
<title>Activity Types</title>
</head>
<body>
<div class="body">
<h1>Activity Types</h1>
<%-- One-off status message (e.g. set by a controller redirect), rendered only when present. --%>
<g:if test="${flash.message}">
    <div class="message">${flash.message}</div>
</g:if>
<g:form method="post">
<div class="list">
<div class="buttons"></div>
<table class="sortable">
<thead>
<tr>
<th>Name</th>
<th>Version</th>
<th>Status</th>
<th>Description</th>
<th>Creation Date</th>
<th>Deprecation Date</th>
</tr>
</thead>
<tbody>
<g:each var="activityTypeInfo" in="${activityTypeInfos}" status="i">
<tr class="${(i % 2) == 0 ? 'odd' : 'even'}">
<td><g:link action="show" params="${[name: activityTypeInfo.activityType.name, version: activityTypeInfo.activityType.version]}">${activityTypeInfo.activityType.name}</g:link></td>
<td>${activityTypeInfo.activityType.version}</td>
<td>${activityTypeInfo.status}</td>
<td>${activityTypeInfo.description}</td>
<td><g:formatDate date="${activityTypeInfo.creationDate}"/></td>
<td><g:formatDate date="${activityTypeInfo.deprecationDate}"/></td>
</tr>
</g:each>
</tbody>
</table>
</div>
<footer/>
</g:form>
</div>
</body>
</html>
| Groovy Server Pages | 4 | Threadless/asgard | grails-app/views/activityType/list.gsp | [
"Apache-2.0"
] |
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "absl/synchronization/mutex.h"
#include "tensorflow/compiler/xla/client/client_library.h"
#include "tensorflow/compiler/xla/client/xla_builder.h"
#include "tensorflow/compiler/xla/shape_util.h"
#include "tensorflow/core/lib/core/status_test_util.h"
#include "tensorflow/core/platform/env.h"
#include "tensorflow/core/platform/test.h"
namespace xla {
namespace {
// Builds an XLA computation that reads a scalar s32 from infeed, adds 1, and
// writes the result to outfeed, threading the infeed's token through the
// outfeed to order the two operations.
StatusOr<XlaComputation> BuildComputation() {
  XlaBuilder b("computation");
  Shape scalar_s32 = ShapeUtil::MakeShape(S32, {});
  // InfeedWithToken yields a tuple: element 0 is the value, element 1 the token.
  XlaOp infeed = InfeedWithToken(CreateToken(&b), scalar_s32);
  return b.Build(
      OutfeedWithToken(GetTupleElement(infeed, 0) +
                           ConstantLiteral(&b, LiteralUtil::CreateR0<int32>(1)),
                       GetTupleElement(infeed, 1), scalar_s32, ""));
}
void CompileAndExecute(
LocalExecutable* executable, int device_ordinal, LocalClient* client,
absl::Mutex* results_mutex,
std::vector<std::pair<int, StatusOr<ScopedShapedBuffer>>>* results) {
xla::ExecutableRunOptions execute_options;
execute_options.set_intra_op_thread_pool(
client->backend().eigen_intra_op_thread_pool_device());
execute_options.set_device_ordinal(device_ordinal);
execute_options.set_allocator(
xla::ClientLibrary::GetXlaService(client->platform())
->backend()
.memory_allocator());
StatusOr<ScopedShapedBuffer> result =
executable->Run(absl::Span<const ShapedBuffer* const>(), execute_options);
{
absl::MutexLock lock(results_mutex);
results->emplace_back(device_ordinal, std::move(result));
}
}
void TestWithDeviceCount(const int device_count) {
// Run `device_count` copies of the XLA program built by BuildComputation.
TF_ASSERT_OK_AND_ASSIGN(
se::Platform* const platform,
perftools::gputools::MultiPlatformManager::PlatformWithName("Host"));
xla::LocalClientOptions client_options;
client_options.set_platform(platform);
TF_ASSERT_OK_AND_ASSIGN(
LocalClient* const client,
xla::ClientLibrary::GetOrCreateLocalClient(client_options));
TF_ASSERT_OK_AND_ASSIGN(XlaComputation xla_computation, BuildComputation());
TF_ASSERT_OK_AND_ASSIGN(
auto executables,
client->Compile(xla_computation, {}, xla::ExecutableBuildOptions{}));
std::unique_ptr<LocalExecutable> executable = std::move(executables[0]);
std::vector<tensorflow::Thread*> threads;
absl::Mutex results_mutex;
std::vector<std::pair<int, StatusOr<ScopedShapedBuffer>>> results;
tensorflow::Env* env = tensorflow::Env::Default();
for (int device_ordinal = 0; device_ordinal < device_count;
device_ordinal++) {
tensorflow::Thread* t = env->StartThread(
tensorflow::ThreadOptions{}, absl::StrCat("thread-", device_ordinal),
[&executable, device_ordinal, client, &results_mutex, &results] {
CompileAndExecute(executable.get(), device_ordinal, client,
&results_mutex, &results);
});
threads.push_back(t);
}
for (int device_ordinal = 0; device_ordinal < device_count;
device_ordinal++) {
TF_ASSERT_OK(client->TransferToInfeedLocal(
LiteralUtil::CreateR0<int32>(device_ordinal * 100), device_ordinal));
}
for (int device_ordinal = 0; device_ordinal < device_count;
device_ordinal++) {
Literal outfeed(ShapeUtil::MakeShape(S32, {}));
TF_ASSERT_OK(client->TransferFromOutfeedLocal(device_ordinal, &outfeed));
EXPECT_EQ(outfeed, LiteralUtil::CreateR0<int32>(device_ordinal * 100 + 1));
}
for (int device_ordinal = 0; device_ordinal < device_count;
device_ordinal++) {
delete threads[device_ordinal];
}
for (int device_ordinal = 0; device_ordinal < device_count;
device_ordinal++) {
TF_ASSERT_OK(results[device_ordinal].second.status());
}
}
// NB! This test requires --xla_force_host_platform_device_count=4
TEST(MultipleDeviceOnHostTest, OneDevice) { TestWithDeviceCount(1); }
TEST(MultipleDeviceOnHostTest, TwoDevices) { TestWithDeviceCount(2); }
TEST(MultipleDeviceOnHostTest, ThreeDevices) { TestWithDeviceCount(3); }
TEST(MultipleDeviceOnHostTest, FourDevices) { TestWithDeviceCount(4); }
} // namespace
} // namespace xla
| C++ | 4 | ashutom/tensorflow-upstream | tensorflow/compiler/xla/tests/multiple_devices_on_host_test.cc | [
"Apache-2.0"
] |
module.exports = "ok"; | JavaScript | 1 | 1shenxi/webpack | test/cases/parsing/bom/bomfile.js | [
"MIT"
] |
defmodule Mix.Tasks.Deps.Tree do
use Mix.Task
@shortdoc "Prints the dependency tree"
@recursive true
@moduledoc """
Prints the dependency tree.
mix deps.tree
If no dependency is given, it uses the tree defined in the `mix.exs` file.
## Command line options
* `--only` - the environment to show dependencies for
* `--target` - the target to show dependencies for
* `--exclude` - exclude dependencies which you do not want to see printed.
* `--format` - Can be set to one of either:
* `pretty` - uses Unicode code points for formatting the tree.
This is the default except on Windows.
* `plain` - does not use Unicode code points for formatting the tree.
This is the default on Windows.
* `dot` - produces a DOT graph description of the dependency tree
in `deps_tree.dot` in the current directory.
Warning: this will override any previously generated file.
"""
@switches [only: :string, target: :string, exclude: :keep, format: :string]
@impl true
def run(args) do
Mix.Project.get!()
{opts, args, _} = OptionParser.parse(args, switches: @switches)
deps_opts =
for {switch, key} <- [only: :env, target: :target],
value = opts[switch],
do: {key, :"#{value}"}
deps = Mix.Dep.load_on_environment(deps_opts)
root =
case args do
[] ->
Mix.Project.config()[:app] ||
Mix.raise("no application given and none found in mix.exs file")
[app] ->
app = String.to_atom(app)
find_dep(deps, app) || Mix.raise("could not find dependency #{app}")
end
if opts[:format] == "dot" do
callback = callback(&format_dot/1, deps, opts)
Mix.Utils.write_dot_graph!("deps_tree.dot", "dependency tree", [root], callback, opts)
"""
Generated "deps_tree.dot" in the current directory. To generate a PNG:
dot -Tpng deps_tree.dot -o deps_tree.png
For more options see http://www.graphviz.org/.
"""
|> String.trim_trailing()
|> Mix.shell().info()
else
callback = callback(&format_tree/1, deps, opts)
Mix.Utils.print_tree([root], callback, opts)
end
end
defp callback(formatter, deps, opts) do
excluded = Keyword.get_values(opts, :exclude) |> Enum.map(&String.to_atom/1)
top_level = Enum.filter(deps, & &1.top_level)
fn
%Mix.Dep{app: app} = dep ->
# Do not show dependencies if they were
# already shown at the top level
deps =
if not dep.top_level && find_dep(top_level, app) do
[]
else
find_dep(deps, app).deps
end
{formatter.(dep), exclude_and_sort(deps, excluded)}
app ->
{{Atom.to_string(app), nil}, exclude_and_sort(top_level, excluded)}
end
end
defp exclude_and_sort(deps, excluded) do
deps
|> Enum.reject(&(&1.app in excluded))
|> Enum.sort_by(& &1.app)
end
defp format_dot(%{app: app, requirement: requirement, opts: opts}) do
override =
if opts[:override] do
" *override*"
else
""
end
requirement = requirement && requirement(requirement)
{app, "#{requirement}#{override}"}
end
defp format_tree(%{app: app, scm: scm, requirement: requirement, opts: opts}) do
override =
if opts[:override] do
IO.ANSI.format([:bright, " *override*"])
else
""
end
requirement = requirement && "#{requirement(requirement)} "
{app, "#{requirement}(#{scm.format(opts)})#{override}"}
end
defp requirement(%Regex{} = regex), do: "#{inspect(regex)}"
defp requirement(binary) when is_binary(binary), do: binary
defp find_dep(deps, app) do
Enum.find(deps, &(&1.app == app))
end
end
| Elixir | 5 | doughsay/elixir | lib/mix/lib/mix/tasks/deps.tree.ex | [
"Apache-2.0"
] |
"""Diagnostics support for Sensibo."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics.util import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .const import DOMAIN
from .coordinator import SensiboDataUpdateCoordinator
TO_REDACT = {
"location",
"ssid",
"id",
"macAddress",
"parentDeviceUid",
"qrId",
"serial",
"uid",
"email",
"firstName",
"lastName",
"username",
"podUid",
"deviceUid",
}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator: SensiboDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
return async_redact_data(coordinator.data.raw, TO_REDACT)
| Python | 5 | MrDelik/core | homeassistant/components/sensibo/diagnostics.py | [
"Apache-2.0"
] |
function Worker () => Threads = this; class
(code) ->
@thread = t = Threads.create!
t.on \message (args) ~> @onmessage? data: args
t.on \error (args) ~> @onerror? args
t.on \close -> t.destroy!
@terminate = -> t.destroy!
@add-event-listener = (event, cb) ~>
if event is \message
@onmessage = cb
else
t.on event, cb
@dispatch-event = (event) -> t.emitSerialized event.type, event
@post-message = (data) -> t.emitSerialized \message {data}
if typeof code is \function
t.eval "(#code)()"
else if code?
t.load code
| LiveScript | 3 | mmig/node-webworker-threads | src/worker.ls | [
"CC0-1.0"
] |
export {
redef SSL::disable_analyzer_after_detection = T;
}
event ssl_established(c: connection) &priority=-7 {
mcore_apply_shunt_policy(c$id$orig_h, c$id$orig_p, c$id$resp_h, c$id$resp_p, T);
}
event ssh_server_version(c: connection, version: string) &priority=-7 {
mcore_apply_shunt_policy(c$id$orig_h, c$id$orig_p, c$id$resp_h, c$id$resp_p, T);
}
| Bro | 3 | reservoirlabs/bro-scripts | supercomputing/shunt-encrypted.bro | [
"Apache-2.0"
] |
<html>
<body>
<h2>Activation Failed</h2>
</body>
</html> | Java Server Pages | 0 | zeesh49/tutorials | spring-mvc-webflow/src/main/webapp/WEB-INF/view/failure.jsp | [
"MIT"
] |
package universe_test
import "testing"
import "math"
option now = () => 2030-01-01T00:00:00Z
inData =
"
#datatype,string,long,dateTime:RFC3339,double,string,string
#group,false,false,false,false,true,true
#default,_result,,,,,
,result,table,_time,_value,_field,_measurement
,,0,2018-05-22T00:00:00Z,110.46,used_percent,disk
,,0,2018-05-22T00:00:10Z,109.80,used_percent,disk
,,0,2018-05-22T00:00:20Z,110.17,used_percent,disk
,,0,2018-05-22T00:00:30Z,109.82,used_percent,disk
,,0,2018-05-22T00:00:40Z,110.15,used_percent,disk
,,0,2018-05-22T00:00:50Z,109.31,used_percent,disk
,,0,2018-05-22T00:01:00Z,109.05,used_percent,disk
,,0,2018-05-22T00:01:10Z,107.94,used_percent,disk
,,0,2018-05-22T00:01:20Z,107.76,used_percent,disk
,,0,2018-05-22T00:01:30Z,109.24,used_percent,disk
,,0,2018-05-22T00:01:40Z,109.40,used_percent,disk
,,0,2018-05-22T00:01:50Z,108.50,used_percent,disk
,,0,2018-05-22T00:02:00Z,107.96,used_percent,disk
,,0,2018-05-22T00:02:10Z,108.55,used_percent,disk
,,0,2018-05-22T00:02:20Z,108.85,used_percent,disk
,,0,2018-05-22T00:02:30Z,110.44,used_percent,disk
,,0,2018-05-22T00:02:40Z,109.89,used_percent,disk
,,0,2018-05-22T00:02:50Z,110.70,used_percent,disk
,,0,2018-05-22T00:03:00Z,110.79,used_percent,disk
,,0,2018-05-22T00:03:10Z,110.22,used_percent,disk
,,0,2018-05-22T00:03:20Z,110.00,used_percent,disk
,,0,2018-05-22T00:03:30Z,109.27,used_percent,disk
,,0,2018-05-22T00:03:40Z,106.69,used_percent,disk
,,0,2018-05-22T00:03:50Z,107.07,used_percent,disk
,,0,2018-05-22T00:04:00Z,107.92,used_percent,disk
,,0,2018-05-22T00:04:10Z,107.95,used_percent,disk
,,0,2018-05-22T00:04:20Z,107.70,used_percent,disk
,,0,2018-05-22T00:04:30Z,107.97,used_percent,disk
,,0,2018-05-22T00:04:40Z,106.09,used_percent,disk
"
outData =
"
#datatype,string,long,dateTime:RFC3339,double,string,string
#group,false,false,false,false,true,true
#default,_result,,,,,
,result,table,_time,_value,_field,_measurement
,,0,2018-05-22T00:01:40Z,109.24,used_percent,disk
,,0,2018-05-22T00:01:50Z,109.22,used_percent,disk
,,0,2018-05-22T00:02:00Z,109.12,used_percent,disk
,,0,2018-05-22T00:02:10Z,109.10,used_percent,disk
,,0,2018-05-22T00:02:20Z,109.09,used_percent,disk
,,0,2018-05-22T00:02:30Z,109.12,used_percent,disk
,,0,2018-05-22T00:02:40Z,109.14,used_percent,disk
,,0,2018-05-22T00:02:50Z,109.28,used_percent,disk
,,0,2018-05-22T00:03:00Z,109.44,used_percent,disk
,,0,2018-05-22T00:03:10Z,109.46,used_percent,disk
,,0,2018-05-22T00:03:20Z,109.47,used_percent,disk
,,0,2018-05-22T00:03:30Z,109.46,used_percent,disk
,,0,2018-05-22T00:03:40Z,109.39,used_percent,disk
,,0,2018-05-22T00:03:50Z,109.32,used_percent,disk
,,0,2018-05-22T00:04:00Z,109.29,used_percent,disk
,,0,2018-05-22T00:04:10Z,109.18,used_percent,disk
,,0,2018-05-22T00:04:20Z,109.08,used_percent,disk
,,0,2018-05-22T00:04:30Z,108.95,used_percent,disk
,,0,2018-05-22T00:04:40Z,108.42,used_percent,disk
"
kama = (table=<-) =>
table
|> range(start: 2018-05-22T00:00:00Z)
|> drop(columns: ["_start", "_stop"])
|> kaufmansAMA(n: 10)
|> map(fn: (r) => ({r with _value: math.round(x: r._value * 100.0) / 100.0}))
test _kama = () => ({input: testing.loadStorage(csv: inData), want: testing.loadMem(csv: outData), fn: kama})
| FLUX | 4 | metrico/flux | stdlib/universe/kama_v2_test.flux | [
"MIT"
] |
"""Prime Check."""
import math
import unittest
def prime_check(number: int) -> bool:
"""Checks to see if a number is a prime in O(sqrt(n)).
A number is prime if it has exactly two factors: 1 and itself.
>>> prime_check(0)
False
>>> prime_check(1)
False
>>> prime_check(2)
True
>>> prime_check(3)
True
>>> prime_check(27)
False
>>> prime_check(87)
False
>>> prime_check(563)
True
>>> prime_check(2999)
True
>>> prime_check(67483)
False
"""
if 1 < number < 4:
# 2 and 3 are primes
return True
elif number < 2 or not number % 2:
# Negatives, 0, 1 and all even numbers are not primes
return False
odd_numbers = range(3, int(math.sqrt(number) + 1), 2)
return not any(not number % i for i in odd_numbers)
class Test(unittest.TestCase):
def test_primes(self):
self.assertTrue(prime_check(2))
self.assertTrue(prime_check(3))
self.assertTrue(prime_check(5))
self.assertTrue(prime_check(7))
self.assertTrue(prime_check(11))
self.assertTrue(prime_check(13))
self.assertTrue(prime_check(17))
self.assertTrue(prime_check(19))
self.assertTrue(prime_check(23))
self.assertTrue(prime_check(29))
def test_not_primes(self):
self.assertFalse(
prime_check(-19),
"Negative numbers are excluded by definition of prime numbers.",
)
self.assertFalse(
prime_check(0),
"Zero doesn't have any positive factors, primes must have exactly two.",
)
self.assertFalse(
prime_check(1),
"One only has 1 positive factor, primes must have exactly two.",
)
self.assertFalse(prime_check(2 * 2))
self.assertFalse(prime_check(2 * 3))
self.assertFalse(prime_check(3 * 3))
self.assertFalse(prime_check(3 * 5))
self.assertFalse(prime_check(3 * 5 * 7))
if __name__ == "__main__":
unittest.main()
| Python | 5 | NavpreetDevpuri/Python | maths/prime_check.py | [
"MIT"
] |
R.<a,d,x1,y1,x2,y2> = QQ[]
S = R.quotient([a*x1^2+y1^2-1+d*x1^2*y1^2, a*x2^2+y2^2-1+d*x2^2*y2^2])
# the Edwards addition law:
x3 = (x1*y2+y1*x2)/(1+d*x1*x2*y1*y2)
y3 = (y1*y2-a*x1*x2)/(1-d*x1*x2*y1*y2)
| Sage | 3 | ShipChain/baseline | lib/circuits/ecc/edwards.sage | [
"CC0-1.0"
] |
package com.baeldung.traits
trait SpeakingTrait {
String basicAbility() {
return "Speaking!!"
}
String speakAndWalk() {
return "Speak and walk!!"
}
} | Groovy | 3 | DBatOWL/tutorials | core-groovy/src/main/groovy/com/baeldung/traits/SpeakingTrait.groovy | [
"MIT"
] |
<?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: src/proto/grpc/testing/messages.proto
namespace Grpc\Testing;
use Google\Protobuf\Internal\GPBType;
use Google\Protobuf\Internal\RepeatedField;
use Google\Protobuf\Internal\GPBUtil;
/**
* Client-streaming request.
*
* Generated from protobuf message <code>grpc.testing.StreamingInputCallRequest</code>
*/
class StreamingInputCallRequest extends \Google\Protobuf\Internal\Message
{
/**
* Optional input payload sent along with the request.
*
* Generated from protobuf field <code>.grpc.testing.Payload payload = 1;</code>
*/
protected $payload = null;
/**
* Whether the server should expect this request to be compressed. This field
* is "nullable" in order to interoperate seamlessly with servers not able to
* implement the full compression tests by introspecting the call to verify
* the request's compression status.
*
* Generated from protobuf field <code>.grpc.testing.BoolValue expect_compressed = 2;</code>
*/
protected $expect_compressed = null;
/**
* Constructor.
*
* @param array $data {
* Optional. Data for populating the Message object.
*
* @type \Grpc\Testing\Payload $payload
* Optional input payload sent along with the request.
* @type \Grpc\Testing\BoolValue $expect_compressed
* Whether the server should expect this request to be compressed. This field
* is "nullable" in order to interoperate seamlessly with servers not able to
* implement the full compression tests by introspecting the call to verify
* the request's compression status.
* }
*/
public function __construct($data = NULL) {
\GPBMetadata\Src\Proto\Grpc\Testing\Messages::initOnce();
parent::__construct($data);
}
/**
* Optional input payload sent along with the request.
*
* Generated from protobuf field <code>.grpc.testing.Payload payload = 1;</code>
* @return \Grpc\Testing\Payload
*/
public function getPayload()
{
return $this->payload;
}
/**
* Optional input payload sent along with the request.
*
* Generated from protobuf field <code>.grpc.testing.Payload payload = 1;</code>
* @param \Grpc\Testing\Payload $var
* @return $this
*/
public function setPayload($var)
{
GPBUtil::checkMessage($var, \Grpc\Testing\Payload::class);
$this->payload = $var;
return $this;
}
/**
* Whether the server should expect this request to be compressed. This field
* is "nullable" in order to interoperate seamlessly with servers not able to
* implement the full compression tests by introspecting the call to verify
* the request's compression status.
*
* Generated from protobuf field <code>.grpc.testing.BoolValue expect_compressed = 2;</code>
* @return \Grpc\Testing\BoolValue
*/
public function getExpectCompressed()
{
return $this->expect_compressed;
}
/**
* Whether the server should expect this request to be compressed. This field
* is "nullable" in order to interoperate seamlessly with servers not able to
* implement the full compression tests by introspecting the call to verify
* the request's compression status.
*
* Generated from protobuf field <code>.grpc.testing.BoolValue expect_compressed = 2;</code>
* @param \Grpc\Testing\BoolValue $var
* @return $this
*/
public function setExpectCompressed($var)
{
GPBUtil::checkMessage($var, \Grpc\Testing\BoolValue::class);
$this->expect_compressed = $var;
return $this;
}
}
| PHP | 5 | arghyadip01/grpc | src/php/tests/qps/generated_code/Grpc/Testing/StreamingInputCallRequest.php | [
"Apache-2.0"
] |
#lang scribble/doc
@(require scribble/manual)
@title{DrRacket: The Racket Programming Environment}
@author["Robert Bruce Findler" "PLT"]
DrRacket is a graphical environment for developing programs using the
Racket programming languages.
@table-of-contents[]
@; ----------------------------------------------------------------------
@include-section["interface-essentials.scrbl"]
@include-section["languages.scrbl"]
@include-section["interface-ref.scrbl"]
@include-section["extending.scrbl"]
@; ----------------------------------------------------------------------
@index-section[]
| Racket | 3 | rrthomas/drracket | drracket/scribblings/drracket/drracket.scrbl | [
"Apache-2.0",
"MIT"
] |
size: 1920px 1080px;
| CLIPS | 0 | asmuth-archive/travistest | test/layer/resize_px.clp | [
"Apache-2.0"
] |
@ tableflux . h2o_temperature { time > 0 , state , location , bottom_degrees , surface_degrees }
| FLUX | 0 | RohanSreerama5/flux | colm/tableflux/all.flux | [
"MIT"
] |
#!/usr/bin/env xdg-open
[Desktop Entry]
Version=1.0
Type=Application
Terminal=false
TryExec=/usr/bin/zap
Exec=/usr/bin/zap
Name=OWASP ZAP
Icon=/usr/share/icons/zapicon.png | desktop | 1 | a6k8s4/zaproxy | build/debian/owasp-zap.desktop | [
"Apache-2.0"
] |
<% template foo() %> | Tea | 0 | JavascriptID/sourcerer-app | src/test/resources/samples/langs/Tea/foo.tea | [
"MIT"
] |
<mt:ignore>
<__trans phrase="You can specify several configuration for a theme.">
</mt:ignore>
<mt:ignore><__trans phrase="Settings for meta keywords."></mt:ignore>
<mt:setvarblock name="metakeywords"><mt:blogname>,<mt:pages><mt:pagetitle>,</mt:pages></mt:setvarblock>
<mt:ignore><__trans phrase="Specify the URL of your Facebook page for displaying Facebook icon in all pages."></mt:ignore>
<mt:setvarblock name="facebook">xxxxxx</mt:setvarblock>
<mt:ignore><__trans phrase="Specify the URL of your Twitter account page for displaying Twitter icon in all pages."></mt:ignore>
<mt:setvarblock name="twitter">xxxxxx</mt:setvarblock>
<mt:Ignore><__trans phrase="Specify the AppID of Facebook for OGP. Also, specify the twitter account to embed in tweet."></mt:Ignore>
<mt:SetVarBlock name="fbAppId"></mt:SetVarBlock>
<mt:SetVarBlock name="twitterVia"></mt:SetVarBlock>
<mt:Ignore><__trans phrase="Specify 1 to displaying that social icons in archive page."></mt:Ignore>
<mt:SetVarBlock name="socialButtonFacebook">1</mt:SetVarBlock>
<mt:SetVarBlock name="socialButtonTwitter">1</mt:SetVarBlock>
<mt:SetVarBlock name="socialButtonHatena">1</mt:SetVarBlock>
<mt:SetVarBlock name="socialButtonPocket">1</mt:SetVarBlock>
<mt:SetVarBlock name="socialButtonLine">1</mt:SetVarBlock>
| MTML | 3 | movabletype/mt-theme-SimpleCorporate | themes/simplecorporate/templates/config.mtml | [
"MIT"
] |
{{ sylius_template_event('sylius.shop.taxon.header', {'taxon': taxon}) }}
| Twig | 1 | titomtd/Sylius | src/Sylius/Bundle/ShopBundle/Resources/views/Taxon/_header.html.twig | [
"MIT"
] |
/**
* This file is part of the Phalcon Framework.
*
* (c) Phalcon Team <team@phalcon.io>
*
* For the full copyright and license information, please view the LICENSE.txt
* file that was distributed with this source code.
*/
namespace Phalcon\Mvc\Model;
use Phalcon\Messages\Message;
use Phalcon\Mvc\ModelInterface;
/**
* Phalcon\Mvc\Model\ValidationFailed
*
* This exception is generated when a model fails to save a record
* Phalcon\Mvc\Model must be set up to have this behavior
*/
class ValidationFailed extends Exception
{
/**
* @var array
*/
protected messages = [];
/**
* @var ModelInterface
*/
protected model;
/**
* Phalcon\Mvc\Model\ValidationFailed constructor
*
* @param ModelInterface model
* @param Message[] validationMessages
*/
public function __construct(<ModelInterface> model, array! validationMessages)
{
var messageStr, message;
if count(validationMessages) > 0 {
/**
* Get the first message in the array
*/
let message = validationMessages[0];
/**
* Get the message to use it in the exception
*/
let messageStr = message->getMessage();
} else {
let messageStr = "Validation failed";
}
let this->model = model;
let this->messages = validationMessages;
parent::__construct(messageStr);
}
/**
* Returns the complete group of messages produced in the validation
*/
public function getMessages() -> <Message[]>
{
return this->messages;
}
/**
* Returns the model that generated the messages
*/
public function getModel() -> <ModelInterface>
{
return this->model;
}
}
| Zephir | 4 | tidytrax/cphalcon | phalcon/Mvc/Model/ValidationFailed.zep | [
"BSD-3-Clause"
] |
s="\nprocess.stdout.write('s='+JSON.stringify(s)+s)^!\n"
process.stdout.write('s='+JSON.stringify(s)+s)^!
| PogoScript | 1 | MakeNowJust/quine | quine.pogo | [
"Beerware"
] |
/*.js
!index.js
yarn.lock
| Handlebars | 0 | cwlsn/gatsby | plop-templates/package/.gitignore.hbs | [
"MIT"
] |
from datetime import datetime
import re
import numpy as np
import pytest
from pandas import (
DataFrame,
Index,
MultiIndex,
Series,
_testing as tm,
)
def test_extract_expand_kwarg_wrong_type_raises(any_string_dtype):
# TODO: should this raise TypeError
values = Series(["fooBAD__barBAD", np.nan, "foo"], dtype=any_string_dtype)
with pytest.raises(ValueError, match="expand must be True or False"):
values.str.extract(".*(BAD[_]+).*(BAD)", expand=None)
def test_extract_expand_kwarg(any_string_dtype):
s = Series(["fooBAD__barBAD", np.nan, "foo"], dtype=any_string_dtype)
expected = DataFrame(["BAD__", np.nan, np.nan], dtype=any_string_dtype)
result = s.str.extract(".*(BAD[_]+).*")
tm.assert_frame_equal(result, expected)
result = s.str.extract(".*(BAD[_]+).*", expand=True)
tm.assert_frame_equal(result, expected)
expected = DataFrame(
[["BAD__", "BAD"], [np.nan, np.nan], [np.nan, np.nan]], dtype=any_string_dtype
)
result = s.str.extract(".*(BAD[_]+).*(BAD)", expand=False)
tm.assert_frame_equal(result, expected)
def test_extract_expand_False_mixed_object():
ser = Series(
["aBAD_BAD", np.nan, "BAD_b_BAD", True, datetime.today(), "foo", None, 1, 2.0]
)
# two groups
result = ser.str.extract(".*(BAD[_]+).*(BAD)", expand=False)
er = [np.nan, np.nan] # empty row
expected = DataFrame([["BAD_", "BAD"], er, ["BAD_", "BAD"], er, er, er, er, er, er])
tm.assert_frame_equal(result, expected)
# single group
result = ser.str.extract(".*(BAD[_]+).*BAD", expand=False)
expected = Series(
["BAD_", np.nan, "BAD_", np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]
)
tm.assert_series_equal(result, expected)
def test_extract_expand_index_raises():
# GH9980
# Index only works with one regex group since
# multi-group would expand to a frame
idx = Index(["A1", "A2", "A3", "A4", "B5"])
msg = "only one regex group is supported with Index"
with pytest.raises(ValueError, match=msg):
idx.str.extract("([AB])([123])", expand=False)
def test_extract_expand_no_capture_groups_raises(index_or_series, any_string_dtype):
s_or_idx = index_or_series(["A1", "B2", "C3"], dtype=any_string_dtype)
msg = "pattern contains no capture groups"
# no groups
with pytest.raises(ValueError, match=msg):
s_or_idx.str.extract("[ABC][123]", expand=False)
# only non-capturing groups
with pytest.raises(ValueError, match=msg):
s_or_idx.str.extract("(?:[AB]).*", expand=False)
def test_extract_expand_single_capture_group(index_or_series, any_string_dtype):
# single group renames series/index properly
s_or_idx = index_or_series(["A1", "A2"], dtype=any_string_dtype)
result = s_or_idx.str.extract(r"(?P<uno>A)\d", expand=False)
expected = index_or_series(["A", "A"], name="uno", dtype=any_string_dtype)
if index_or_series == Series:
tm.assert_series_equal(result, expected)
else:
tm.assert_index_equal(result, expected)
def test_extract_expand_capture_groups(any_string_dtype):
s = Series(["A1", "B2", "C3"], dtype=any_string_dtype)
# one group, no matches
result = s.str.extract("(_)", expand=False)
expected = Series([np.nan, np.nan, np.nan], dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
# two groups, no matches
result = s.str.extract("(_)(_)", expand=False)
expected = DataFrame(
[[np.nan, np.nan], [np.nan, np.nan], [np.nan, np.nan]], dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
# one group, some matches
result = s.str.extract("([AB])[123]", expand=False)
expected = Series(["A", "B", np.nan], dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
# two groups, some matches
result = s.str.extract("([AB])([123])", expand=False)
expected = DataFrame(
[["A", "1"], ["B", "2"], [np.nan, np.nan]], dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
# one named group
result = s.str.extract("(?P<letter>[AB])", expand=False)
expected = Series(["A", "B", np.nan], name="letter", dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
# two named groups
result = s.str.extract("(?P<letter>[AB])(?P<number>[123])", expand=False)
expected = DataFrame(
[["A", "1"], ["B", "2"], [np.nan, np.nan]],
columns=["letter", "number"],
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
# mix named and unnamed groups
result = s.str.extract("([AB])(?P<number>[123])", expand=False)
expected = DataFrame(
[["A", "1"], ["B", "2"], [np.nan, np.nan]],
columns=[0, "number"],
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
# one normal group, one non-capturing group
result = s.str.extract("([AB])(?:[123])", expand=False)
expected = Series(["A", "B", np.nan], dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
# two normal groups, one non-capturing group
s = Series(["A11", "B22", "C33"], dtype=any_string_dtype)
result = s.str.extract("([AB])([123])(?:[123])", expand=False)
expected = DataFrame(
[["A", "1"], ["B", "2"], [np.nan, np.nan]], dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
# one optional group followed by one normal group
s = Series(["A1", "B2", "3"], dtype=any_string_dtype)
result = s.str.extract("(?P<letter>[AB])?(?P<number>[123])", expand=False)
expected = DataFrame(
[["A", "1"], ["B", "2"], [np.nan, "3"]],
columns=["letter", "number"],
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
# one normal group followed by one optional group
s = Series(["A1", "B2", "C"], dtype=any_string_dtype)
result = s.str.extract("(?P<letter>[ABC])(?P<number>[123])?", expand=False)
expected = DataFrame(
[["A", "1"], ["B", "2"], ["C", np.nan]],
columns=["letter", "number"],
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
def test_extract_expand_capture_groups_index(index, any_string_dtype):
# https://github.com/pandas-dev/pandas/issues/6348
# not passing index to the extractor
data = ["A1", "B2", "C"]
if len(index) < len(data):
pytest.skip("Index too short")
index = index[: len(data)]
s = Series(data, index=index, dtype=any_string_dtype)
result = s.str.extract(r"(\d)", expand=False)
expected = Series(["1", "2", np.nan], index=index, dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
result = s.str.extract(r"(?P<letter>\D)(?P<number>\d)?", expand=False)
expected = DataFrame(
[["A", "1"], ["B", "2"], ["C", np.nan]],
columns=["letter", "number"],
index=index,
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
def test_extract_single_series_name_is_preserved(any_string_dtype):
s = Series(["a3", "b3", "c2"], name="bob", dtype=any_string_dtype)
result = s.str.extract(r"(?P<sue>[a-z])", expand=False)
expected = Series(["a", "b", "c"], name="sue", dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
def test_extract_expand_True(any_string_dtype):
# Contains tests like those in test_match and some others.
s = Series(["fooBAD__barBAD", np.nan, "foo"], dtype=any_string_dtype)
result = s.str.extract(".*(BAD[_]+).*(BAD)", expand=True)
expected = DataFrame(
[["BAD__", "BAD"], [np.nan, np.nan], [np.nan, np.nan]], dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
def test_extract_expand_True_mixed_object():
er = [np.nan, np.nan] # empty row
mixed = Series(
[
"aBAD_BAD",
np.nan,
"BAD_b_BAD",
True,
datetime.today(),
"foo",
None,
1,
2.0,
]
)
result = mixed.str.extract(".*(BAD[_]+).*(BAD)", expand=True)
expected = DataFrame([["BAD_", "BAD"], er, ["BAD_", "BAD"], er, er, er, er, er, er])
tm.assert_frame_equal(result, expected)
def test_extract_expand_True_single_capture_group_raises(
index_or_series, any_string_dtype
):
# these should work for both Series and Index
# no groups
s_or_idx = index_or_series(["A1", "B2", "C3"], dtype=any_string_dtype)
msg = "pattern contains no capture groups"
with pytest.raises(ValueError, match=msg):
s_or_idx.str.extract("[ABC][123]", expand=True)
# only non-capturing groups
with pytest.raises(ValueError, match=msg):
s_or_idx.str.extract("(?:[AB]).*", expand=True)
def test_extract_expand_True_single_capture_group(index_or_series, any_string_dtype):
# single group renames series/index properly
s_or_idx = index_or_series(["A1", "A2"], dtype=any_string_dtype)
result = s_or_idx.str.extract(r"(?P<uno>A)\d", expand=True)
expected_dtype = "object" if index_or_series is Index else any_string_dtype
expected = DataFrame({"uno": ["A", "A"]}, dtype=expected_dtype)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("name", [None, "series_name"])
def test_extract_series(name, any_string_dtype):
# extract should give the same result whether or not the series has a name.
s = Series(["A1", "B2", "C3"], name=name, dtype=any_string_dtype)
# one group, no matches
result = s.str.extract("(_)", expand=True)
expected = DataFrame([np.nan, np.nan, np.nan], dtype=any_string_dtype)
tm.assert_frame_equal(result, expected)
# two groups, no matches
result = s.str.extract("(_)(_)", expand=True)
expected = DataFrame(
[[np.nan, np.nan], [np.nan, np.nan], [np.nan, np.nan]], dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
# one group, some matches
result = s.str.extract("([AB])[123]", expand=True)
expected = DataFrame(["A", "B", np.nan], dtype=any_string_dtype)
tm.assert_frame_equal(result, expected)
# two groups, some matches
result = s.str.extract("([AB])([123])", expand=True)
expected = DataFrame(
[["A", "1"], ["B", "2"], [np.nan, np.nan]], dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
# one named group
result = s.str.extract("(?P<letter>[AB])", expand=True)
expected = DataFrame({"letter": ["A", "B", np.nan]}, dtype=any_string_dtype)
tm.assert_frame_equal(result, expected)
# two named groups
result = s.str.extract("(?P<letter>[AB])(?P<number>[123])", expand=True)
expected = DataFrame(
[["A", "1"], ["B", "2"], [np.nan, np.nan]],
columns=["letter", "number"],
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
# mix named and unnamed groups
result = s.str.extract("([AB])(?P<number>[123])", expand=True)
expected = DataFrame(
[["A", "1"], ["B", "2"], [np.nan, np.nan]],
columns=[0, "number"],
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
# one normal group, one non-capturing group
result = s.str.extract("([AB])(?:[123])", expand=True)
expected = DataFrame(["A", "B", np.nan], dtype=any_string_dtype)
tm.assert_frame_equal(result, expected)
def test_extract_optional_groups(any_string_dtype):
# two normal groups, one non-capturing group
s = Series(["A11", "B22", "C33"], dtype=any_string_dtype)
result = s.str.extract("([AB])([123])(?:[123])", expand=True)
expected = DataFrame(
[["A", "1"], ["B", "2"], [np.nan, np.nan]], dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
# one optional group followed by one normal group
s = Series(["A1", "B2", "3"], dtype=any_string_dtype)
result = s.str.extract("(?P<letter>[AB])?(?P<number>[123])", expand=True)
expected = DataFrame(
[["A", "1"], ["B", "2"], [np.nan, "3"]],
columns=["letter", "number"],
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
# one normal group followed by one optional group
s = Series(["A1", "B2", "C"], dtype=any_string_dtype)
result = s.str.extract("(?P<letter>[ABC])(?P<number>[123])?", expand=True)
expected = DataFrame(
[["A", "1"], ["B", "2"], ["C", np.nan]],
columns=["letter", "number"],
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
def test_extract_dataframe_capture_groups_index(index, any_string_dtype):
# GH6348
# not passing index to the extractor
data = ["A1", "B2", "C"]
if len(index) < len(data):
pytest.skip("Index too short")
index = index[: len(data)]
s = Series(data, index=index, dtype=any_string_dtype)
result = s.str.extract(r"(\d)", expand=True)
expected = DataFrame(["1", "2", np.nan], index=index, dtype=any_string_dtype)
tm.assert_frame_equal(result, expected)
result = s.str.extract(r"(?P<letter>\D)(?P<number>\d)?", expand=True)
expected = DataFrame(
[["A", "1"], ["B", "2"], ["C", np.nan]],
columns=["letter", "number"],
index=index,
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
def test_extract_single_group_returns_frame(any_string_dtype):
# GH11386 extract should always return DataFrame, even when
# there is only one group. Prior to v0.18.0, extract returned
# Series when there was only one group in the regex.
s = Series(["a3", "b3", "c2"], name="series_name", dtype=any_string_dtype)
result = s.str.extract(r"(?P<letter>[a-z])", expand=True)
expected = DataFrame({"letter": ["a", "b", "c"]}, dtype=any_string_dtype)
tm.assert_frame_equal(result, expected)
def test_extractall(any_string_dtype):
data = [
"dave@google.com",
"tdhock5@gmail.com",
"maudelaperriere@gmail.com",
"rob@gmail.com some text steve@gmail.com",
"a@b.com some text c@d.com and e@f.com",
np.nan,
"",
]
expected_tuples = [
("dave", "google", "com"),
("tdhock5", "gmail", "com"),
("maudelaperriere", "gmail", "com"),
("rob", "gmail", "com"),
("steve", "gmail", "com"),
("a", "b", "com"),
("c", "d", "com"),
("e", "f", "com"),
]
pat = r"""
(?P<user>[a-z0-9]+)
@
(?P<domain>[a-z]+)
\.
(?P<tld>[a-z]{2,4})
"""
expected_columns = ["user", "domain", "tld"]
s = Series(data, dtype=any_string_dtype)
# extractall should return a DataFrame with one row for each match, indexed by the
# subject from which the match came.
expected_index = MultiIndex.from_tuples(
[(0, 0), (1, 0), (2, 0), (3, 0), (3, 1), (4, 0), (4, 1), (4, 2)],
names=(None, "match"),
)
expected = DataFrame(
expected_tuples, expected_index, expected_columns, dtype=any_string_dtype
)
result = s.str.extractall(pat, flags=re.VERBOSE)
tm.assert_frame_equal(result, expected)
# The index of the input Series should be used to construct the index of the output
# DataFrame:
mi = MultiIndex.from_tuples(
[
("single", "Dave"),
("single", "Toby"),
("single", "Maude"),
("multiple", "robAndSteve"),
("multiple", "abcdef"),
("none", "missing"),
("none", "empty"),
]
)
s = Series(data, index=mi, dtype=any_string_dtype)
expected_index = MultiIndex.from_tuples(
[
("single", "Dave", 0),
("single", "Toby", 0),
("single", "Maude", 0),
("multiple", "robAndSteve", 0),
("multiple", "robAndSteve", 1),
("multiple", "abcdef", 0),
("multiple", "abcdef", 1),
("multiple", "abcdef", 2),
],
names=(None, None, "match"),
)
expected = DataFrame(
expected_tuples, expected_index, expected_columns, dtype=any_string_dtype
)
result = s.str.extractall(pat, flags=re.VERBOSE)
tm.assert_frame_equal(result, expected)
# MultiIndexed subject with names.
s = Series(data, index=mi, dtype=any_string_dtype)
s.index.names = ("matches", "description")
expected_index.names = ("matches", "description", "match")
expected = DataFrame(
expected_tuples, expected_index, expected_columns, dtype=any_string_dtype
)
result = s.str.extractall(pat, flags=re.VERBOSE)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"pat,expected_names",
[
# optional groups.
("(?P<letter>[AB])?(?P<number>[123])", ["letter", "number"]),
# only one of two groups has a name.
("([AB])?(?P<number>[123])", [0, "number"]),
],
)
def test_extractall_column_names(pat, expected_names, any_string_dtype):
s = Series(["", "A1", "32"], dtype=any_string_dtype)
result = s.str.extractall(pat)
expected = DataFrame(
[("A", "1"), (np.nan, "3"), (np.nan, "2")],
index=MultiIndex.from_tuples([(1, 0), (2, 0), (2, 1)], names=(None, "match")),
columns=expected_names,
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
def test_extractall_single_group(any_string_dtype):
s = Series(["a3", "b3", "d4c2"], name="series_name", dtype=any_string_dtype)
expected_index = MultiIndex.from_tuples(
[(0, 0), (1, 0), (2, 0), (2, 1)], names=(None, "match")
)
# extractall(one named group) returns DataFrame with one named column.
result = s.str.extractall(r"(?P<letter>[a-z])")
expected = DataFrame(
{"letter": ["a", "b", "d", "c"]}, index=expected_index, dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
# extractall(one un-named group) returns DataFrame with one un-named column.
result = s.str.extractall(r"([a-z])")
expected = DataFrame(
["a", "b", "d", "c"], index=expected_index, dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
def test_extractall_single_group_with_quantifier(any_string_dtype):
# GH#13382
# extractall(one un-named group with quantifier) returns DataFrame with one un-named
# column.
s = Series(["ab3", "abc3", "d4cd2"], name="series_name", dtype=any_string_dtype)
result = s.str.extractall(r"([a-z]+)")
expected = DataFrame(
["ab", "abc", "d", "cd"],
index=MultiIndex.from_tuples(
[(0, 0), (1, 0), (2, 0), (2, 1)], names=(None, "match")
),
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"data, names",
[
([], (None,)),
([], ("i1",)),
([], (None, "i2")),
([], ("i1", "i2")),
(["a3", "b3", "d4c2"], (None,)),
(["a3", "b3", "d4c2"], ("i1", "i2")),
(["a3", "b3", "d4c2"], (None, "i2")),
(["a3", "b3", "d4c2"], ("i1", "i2")),
],
)
def test_extractall_no_matches(data, names, any_string_dtype):
# GH19075 extractall with no matches should return a valid MultiIndex
n = len(data)
if len(names) == 1:
index = Index(range(n), name=names[0])
else:
tuples = (tuple([i] * (n - 1)) for i in range(n))
index = MultiIndex.from_tuples(tuples, names=names)
s = Series(data, name="series_name", index=index, dtype=any_string_dtype)
expected_index = MultiIndex.from_tuples([], names=(names + ("match",)))
# one un-named group.
result = s.str.extractall("(z)")
expected = DataFrame(columns=[0], index=expected_index, dtype=any_string_dtype)
tm.assert_frame_equal(result, expected)
# two un-named groups.
result = s.str.extractall("(z)(z)")
expected = DataFrame(columns=[0, 1], index=expected_index, dtype=any_string_dtype)
tm.assert_frame_equal(result, expected)
# one named group.
result = s.str.extractall("(?P<first>z)")
expected = DataFrame(
columns=["first"], index=expected_index, dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
# two named groups.
result = s.str.extractall("(?P<first>z)(?P<second>z)")
expected = DataFrame(
columns=["first", "second"], index=expected_index, dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
# one named, one un-named.
result = s.str.extractall("(z)(?P<second>z)")
expected = DataFrame(
columns=[0, "second"], index=expected_index, dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
def test_extractall_stringindex(any_string_dtype):
s = Series(["a1a2", "b1", "c1"], name="xxx", dtype=any_string_dtype)
result = s.str.extractall(r"[ab](?P<digit>\d)")
expected = DataFrame(
{"digit": ["1", "2", "1"]},
index=MultiIndex.from_tuples([(0, 0), (0, 1), (1, 0)], names=[None, "match"]),
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
# index should return the same result as the default index without name thus
# index.name doesn't affect to the result
if any_string_dtype == "object":
for idx in [
Index(["a1a2", "b1", "c1"]),
Index(["a1a2", "b1", "c1"], name="xxx"),
]:
result = idx.str.extractall(r"[ab](?P<digit>\d)")
tm.assert_frame_equal(result, expected)
s = Series(
["a1a2", "b1", "c1"],
name="s_name",
index=Index(["XX", "yy", "zz"], name="idx_name"),
dtype=any_string_dtype,
)
result = s.str.extractall(r"[ab](?P<digit>\d)")
expected = DataFrame(
{"digit": ["1", "2", "1"]},
index=MultiIndex.from_tuples(
[("XX", 0), ("XX", 1), ("yy", 0)], names=["idx_name", "match"]
),
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
def test_extractall_no_capture_groups_raises(any_string_dtype):
# Does not make sense to use extractall with a regex that has no capture groups.
# (it returns DataFrame with one column for each capture group)
s = Series(["a3", "b3", "d4c2"], name="series_name", dtype=any_string_dtype)
with pytest.raises(ValueError, match="no capture groups"):
s.str.extractall(r"[a-z]")
def test_extract_index_one_two_groups():
s = Series(["a3", "b3", "d4c2"], index=["A3", "B3", "D4"], name="series_name")
r = s.index.str.extract(r"([A-Z])", expand=True)
e = DataFrame(["A", "B", "D"])
tm.assert_frame_equal(r, e)
# Prior to v0.18.0, index.str.extract(regex with one group)
# returned Index. With more than one group, extract raised an
# error (GH9980). Now extract always returns DataFrame.
r = s.index.str.extract(r"(?P<letter>[A-Z])(?P<digit>[0-9])", expand=True)
e_list = [("A", "3"), ("B", "3"), ("D", "4")]
e = DataFrame(e_list, columns=["letter", "digit"])
tm.assert_frame_equal(r, e)
def test_extractall_same_as_extract(any_string_dtype):
s = Series(["a3", "b3", "c2"], name="series_name", dtype=any_string_dtype)
pattern_two_noname = r"([a-z])([0-9])"
extract_two_noname = s.str.extract(pattern_two_noname, expand=True)
has_multi_index = s.str.extractall(pattern_two_noname)
no_multi_index = has_multi_index.xs(0, level="match")
tm.assert_frame_equal(extract_two_noname, no_multi_index)
pattern_two_named = r"(?P<letter>[a-z])(?P<digit>[0-9])"
extract_two_named = s.str.extract(pattern_two_named, expand=True)
has_multi_index = s.str.extractall(pattern_two_named)
no_multi_index = has_multi_index.xs(0, level="match")
tm.assert_frame_equal(extract_two_named, no_multi_index)
pattern_one_named = r"(?P<group_name>[a-z])"
extract_one_named = s.str.extract(pattern_one_named, expand=True)
has_multi_index = s.str.extractall(pattern_one_named)
no_multi_index = has_multi_index.xs(0, level="match")
tm.assert_frame_equal(extract_one_named, no_multi_index)
pattern_one_noname = r"([a-z])"
extract_one_noname = s.str.extract(pattern_one_noname, expand=True)
has_multi_index = s.str.extractall(pattern_one_noname)
no_multi_index = has_multi_index.xs(0, level="match")
tm.assert_frame_equal(extract_one_noname, no_multi_index)
def test_extractall_same_as_extract_subject_index(any_string_dtype):
# same as above tests, but s has an MultiIndex.
mi = MultiIndex.from_tuples(
[("A", "first"), ("B", "second"), ("C", "third")],
names=("capital", "ordinal"),
)
s = Series(["a3", "b3", "c2"], index=mi, name="series_name", dtype=any_string_dtype)
pattern_two_noname = r"([a-z])([0-9])"
extract_two_noname = s.str.extract(pattern_two_noname, expand=True)
has_match_index = s.str.extractall(pattern_two_noname)
no_match_index = has_match_index.xs(0, level="match")
tm.assert_frame_equal(extract_two_noname, no_match_index)
pattern_two_named = r"(?P<letter>[a-z])(?P<digit>[0-9])"
extract_two_named = s.str.extract(pattern_two_named, expand=True)
has_match_index = s.str.extractall(pattern_two_named)
no_match_index = has_match_index.xs(0, level="match")
tm.assert_frame_equal(extract_two_named, no_match_index)
pattern_one_named = r"(?P<group_name>[a-z])"
extract_one_named = s.str.extract(pattern_one_named, expand=True)
has_match_index = s.str.extractall(pattern_one_named)
no_match_index = has_match_index.xs(0, level="match")
tm.assert_frame_equal(extract_one_named, no_match_index)
pattern_one_noname = r"([a-z])"
extract_one_noname = s.str.extract(pattern_one_noname, expand=True)
has_match_index = s.str.extractall(pattern_one_noname)
no_match_index = has_match_index.xs(0, level="match")
tm.assert_frame_equal(extract_one_noname, no_match_index)
| Python | 5 | ajayiagbebaku/NFL-Model | venv/Lib/site-packages/pandas/tests/strings/test_extract.py | [
"MIT"
] |
// Copyright 2010-2014 RethinkDB, all rights reserved.
#ifndef CONTAINERS_DISK_BACKED_QUEUE_HPP_
#define CONTAINERS_DISK_BACKED_QUEUE_HPP_
#include <string>
#include <vector>
#include "concurrency/fifo_checker.hpp"
#include "concurrency/mutex.hpp"
#include "containers/buffer_group.hpp"
#include "containers/archive/buffer_group_stream.hpp"
#include "containers/archive/vector_stream.hpp"
#include "containers/scoped.hpp"
#include "perfmon/core.hpp"
#include "serializer/types.hpp"
class cache_balancer_t;
class cache_conn_t;
class cache_t;
class txn_t;
class io_backender_t;
class perfmon_collection_t;
class serializer_filepath_t;
ATTR_PACKED(struct queue_block_t {
block_id_t next;
int32_t data_size, live_data_offset;
char data[0];
});
class value_acquisition_object_t;
class buffer_group_viewer_t {
public:
virtual void view_buffer_group(const const_buffer_group_t *group) = 0;
protected:
buffer_group_viewer_t() { }
virtual ~buffer_group_viewer_t() { }
DISABLE_COPYING(buffer_group_viewer_t);
};
class internal_disk_backed_queue_t {
public:
internal_disk_backed_queue_t(io_backender_t *io_backender, const serializer_filepath_t& filename, perfmon_collection_t *stats_parent);
~internal_disk_backed_queue_t();
void push(const write_message_t &value);
void push(const scoped_array_t<write_message_t> &values);
void pop(buffer_group_viewer_t *viewer);
bool empty();
int64_t size();
private:
void add_block_to_head(txn_t *txn);
void remove_block_from_tail(txn_t *txn);
void push_single(txn_t *txn, const write_message_t &value);
mutex_t mutex;
// Serves more as sanity-checking for the cache than this type's ordering.
order_source_t cache_order_source;
perfmon_collection_t perfmon_collection;
perfmon_membership_t perfmon_membership;
int64_t queue_size;
// The end we push onto.
block_id_t head_block_id;
// The end we pop from.
block_id_t tail_block_id;
scoped_ptr_t<serializer_file_opener_t> file_opener;
scoped_ptr_t<log_serializer_t> serializer;
scoped_ptr_t<cache_balancer_t> balancer;
scoped_ptr_t<cache_t> cache;
scoped_ptr_t<cache_conn_t> cache_conn;
DISABLE_COPYING(internal_disk_backed_queue_t);
};
template <class T>
class deserializing_viewer_t : public buffer_group_viewer_t {
public:
explicit deserializing_viewer_t(T *value_out) : value_out_(value_out) { }
virtual ~deserializing_viewer_t() { }
virtual void view_buffer_group(const const_buffer_group_t *group) {
// TODO: We assume here that the data was serialized by _other_ code using
// LATEST -- some in disk_backed_queue_t::push, but also in btree_store.cc,
// which uses internal_disk_backed_queue_t directly. (There's no good reason
// for this today: it needed to be generic when that code was templatized on
// protocol_t.)
deserialize_from_group<cluster_version_t::LATEST_OVERALL>(group, value_out_);
}
private:
T *value_out_;
DISABLE_COPYING(deserializing_viewer_t);
};
// Copies the buffer group into a write_message_t
class copying_viewer_t : public buffer_group_viewer_t {
public:
explicit copying_viewer_t(write_message_t *wm_out) : wm_out_(wm_out) { }
~copying_viewer_t() { }
void view_buffer_group(const const_buffer_group_t *group) {
buffer_group_read_stream_t stream(group);
char buf[1024];
while (!stream.entire_stream_consumed()) {
int64_t c = stream.read(&buf, 1024);
wm_out_->append(&buf, c);
}
}
private:
write_message_t *wm_out_;
DISABLE_COPYING(copying_viewer_t);
};
template <class T>
class disk_backed_queue_t {
public:
disk_backed_queue_t(io_backender_t *io_backender, const serializer_filepath_t& filename, perfmon_collection_t *stats_parent)
: internal_(io_backender, filename, stats_parent) { }
void push(const T &t) {
// TODO: There's an unnecessary copying of data here (which would require a
// serialization_size overloaded function to be implemented in order to eliminate).
// TODO: We have such a serialization_size function.
write_message_t wm;
// Despite that we are serializing this *to disk* disk backed
// queues are not intended to persist across restarts, so this
// is safe.
serialize<cluster_version_t::LATEST_OVERALL>(&wm, t);
internal_.push(wm);
}
void pop(T *out) {
deserializing_viewer_t<T> viewer(out);
internal_.pop(&viewer);
}
bool empty() {
return internal_.empty();
}
int64_t size() {
return internal_.size();
}
private:
internal_disk_backed_queue_t internal_;
DISABLE_COPYING(disk_backed_queue_t);
};
#endif /* CONTAINERS_DISK_BACKED_QUEUE_HPP_ */
| C++ | 4 | zadcha/rethinkdb | src/containers/disk_backed_queue.hpp | [
"Apache-2.0"
] |
source language/playground/lib.nu | Nu | 0 | WindSoilder/nu_scripts | language/playground.nu | [
"MIT"
] |
RwPlatformNestedProjectLoadComponentV2 {
#name : 'tests/gemstone/Sparkle',
#condition : [
'gs3.[7-]'
],
#packageNames : [
'Sparkle-Tools-GemStone-Test'
],
#comment : 'gemstone packages for Sparkle, minimum version 3.7.0'
} | STON | 2 | lifeware-sa/Sparkle | rowan/components/tests/gemstone/Sparkle.ston | [
"MIT"
] |
#include <PJONVirtualBusRouter.h>
#include <PJONSoftwareBitBang.h>
#include <PJONOverSampling.h>
/* This sketch is routing between two local buses to form one larger local bus,
just like the VirtualBusRouter example, but it uses the class
PJONVirtualBusRouter2 which is specialized to using two buses to provide
a simpler declaration.
// ROUTER forwarding from bus 1 to bus 2 and vice versa
__________ ________ __________
| | Bus 1 Pin 7 | | Pin 12 Bus 2 | |
| DEVICE 1 |_______________| ROUTER |_______________| DEVICE 2 |
|__________| |________| |__________| */
PJONVirtualBusRouter2<SoftwareBitBang, OverSampling> router;
void setup() {
router.get_strategy_0().set_pin(7);
router.get_strategy_1().set_pin(12);
router.set_virtual_bus(0);
router.begin();
};
void loop() {
router.loop();
};
| Arduino | 4 | solhuebner/PJON | examples/routing/ARDUINO/Local/VirtualBusRouter/VirtualBusRouter2/VirtualBusRouter2.ino | [
"Apache-2.0"
] |
module Automaster =
autoload xfm
let eol = Util.eol
let comment = Util.comment
let empty = Util.empty
let mount_point = store /\/[^# \t\n]+/
let include = [ label "include" .
del /\+[ \t]*/ "+" .
store /[^# \t\n]+/ .
eol ]
let options = [ label "options" . store /-[^ \t\n]+/ ]
let map_param =
let name = [ label "name" . store /[^: \t\n]+/ ]
in let type = [ label "type" . store /[a-z]+/ ]
in let format = [ label "format" . store /[a-z]+/ ]
in let options = [ label "options" . store /[^ \t\n]+/ ]
in let prelude = ( type .
( del "," "," . format ) ? .
del ":" ":" )
in ( prelude ? .
name .
( Util.del_ws_spc . options ) ? )
let map_record = [ label "map" .
mount_point . Util.del_ws_spc .
map_param .
eol ]
let lns = ( map_record |
include |
comment |
empty ) *
let relevant = (incl "/etc/auto.master") .
Util.stdexcl
let xfm = transform lns relevant
| Augeas | 4 | jaredjennings/puppet-cmits-augeas | files/1.2.0/lenses/automaster.aug | [
"Apache-2.0"
] |
var $extern_var extern i32 public
func $test_extern_var ( var %i i32 ) i32 {
return (dread i32 $extern_var) }
# EXEC: %irbuild Main.mpl
# EXEC: %irbuild Main.irb.mpl
# EXEC: %cmp Main.irb.mpl Main.irb.irb.mpl
| Maple | 2 | harmonyos-mirror/OpenArkCompiler-test | test/testsuite/irbuild_test/I0027-mapleall-irbuild-edge-externvar/Main.mpl | [
"MulanPSL-1.0"
] |
import asyncore, socket, json, sqlite3, time
FLAG1 = "flag{XXXXXXXXXXX}"
POINT_TRESHOLD = 200
def json_response(code, additional_parameter=""):
response_codes = {
0 : "Point added",
1 : "Collision found",
2 : "Point already included",
3 : 'Wrong input format. Please provide a string like this: {"x": val, "y": val, "c": val, "d": val, "groupID": val})',
4 : "Value mismatch! X != c*P + d*Q",
5 : "Server Error"
}
return '{"Response": "%d", "Message": "%s"%s}' % (code, response_codes[code], additional_parameter)
# Teams should choose a non-guessable groupID
def get_response(x, y, c, d, groupID):
# open connection to database
conn = sqlite3.connect("points.db")
conn.row_factory = sqlite3.Row
conn_cursor = conn.cursor()
# convert sage integers to string to avoid "Python int too large for SQLite INTEGER"
x = str(x)
y = str(y)
c = str(c)
d = str(d)
# Select records that map to the same X value
conn_cursor.execute("SELECT * FROM points WHERE x = :x", {"x": x})
query = conn_cursor.fetchall()
# No record found -> Point is not yet included
if len(query) == 0:
# Insert point into database
conn_cursor.execute("INSERT INTO points (x, y, c, d, groupID) VALUES (?, ?, ?, ?, ?)",
(x, y, c, d, groupID))
# Get number of points added by this group
conn_cursor.execute("SELECT x FROM points WHERE groupID = :gID", {"gID": groupID})
points_found = conn_cursor.fetchall()
add_param = ', "points_found": %d' % len(points_found)
# When they found POINT_TRESHOLD distinguished points and a collision occured, return the colliding values as well
if len(points_found) > POINT_TRESHOLD:
add_param += ', "flag1": "%s"' % FLAG1
if server.collision_found:
# compute x from the collision, second flag is just x (not in flag format)
add_param += ', "collision": %s' % (server.collision)
response = json_response(0, add_param)
else:
# One (or more) records found -> check if they have the same exponents
is_included = False
for row in query:
if row["c"] == c and row["d"] == d:
is_included = True
response = json_response(2)
break
if not is_included:
# Exponents are different -> Collision found, add this point
conn_cursor.execute("INSERT INTO points (x, y, c, d, groupID, collision) VALUES (?, ?, ?, ?, ?, 1)",
(x, y, c, d, groupID))
# Get number of points added by this group
conn_cursor.execute("SELECT x FROM points WHERE groupID = :gID", {"gID": groupID})
points_found = conn_cursor.fetchall()
add_param = ', "points_found": %d' % len(points_found)
# add collision
server.collision_found = True
server.collision = '{"c_1": %s, "d_1": %s, "c_2": %s, "d_2": %s}' % (c, d, row["c"], row["d"])
if len(points_found) > POINT_TRESHOLD:
add_param += ', "collision": %s' % (server.collision)
else:
add_param += ', "collision": "collision found but not enough distinguished points submitted yet"'
response = json_response(1, add_param + ', "c": %s, "d": %s' % (row["c"], row["d"]))
# close db connection and return response
conn.commit()
conn_cursor.close()
conn.close()
return response
class DLogHandler(asyncore.dispatcher_with_send):
def handle_read(self):
try:
json_data = self.recv(8192)
if not json_data:
return
data = json.loads(json_data)
# check if the format is correct
if not ("x" in data and "y" in data and "c" in data and "d" in data and "groupID" in data):
response = json_response(3)
else:
c = Integer(data["c"])
d = Integer(data["d"])
x = Integer(data["x"])
y = Integer(data["y"])
X = E((x, y))
if X == c*P + d*Q:
response = get_response(data["x"], data["y"], data["c"], data["d"], data["groupID"])
else:
print("expected %s = %d*%s + %d*%s, but got %s" % (c*P + d*Q, c, P, d, Q, X))
response = json_response(4)
self.send(response)
except Exception as e:
response = json_response(5, ', "Error Message": "%s"' % e)
class Server(asyncore.dispatcher_with_send):
def __init__(self, host, port):
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.set_reuse_addr()
self.bind((host, port))
self.listen(5)
# variable to store some collision
self.collision_found = False
self.collision = {}
def handle_accept(self):
pair = self.accept()
if pair is not None:
sock, addr = pair
print("incoming connection from %s" % repr(addr))
DLogHandler(sock)
if __name__ == '__main__':
load("parameters.sage")
server = Server(serverAdress, serverPort)
asyncore.loop()
| Sage | 5 | amoniaka-knabino/Crypton | Discrete-Logarithm-Problem/Elliptic-Curve-DLP/Algo-Pollard-Rho/Challenges/Multiplayer-2/server.sage | [
"MIT"
] |
CDF
dim0 dim1
f ` | nesC | 0 | jcphill/gdal | autotest/gdrivers/data/netcdf/char_2d_zero_dim.nc | [
"Apache-2.0"
] |
// @noStrictGenericChecks: true
type A = <T, U>(x: T, y: U) => [T, U];
type B = <S>(x: S, y: S) => [S, S];
function f(a: A, b: B) {
a = b; // Error disabled here
b = a; // Ok
}
| TypeScript | 3 | nilamjadhav/TypeScript | tests/cases/compiler/noStrictGenericChecks.ts | [
"Apache-2.0"
] |
HAI 1.2
VISIBLE "1 2 Fizz 4 Buzz Fizz 7 8 Fizz Buzz 11 Fizz 13 14 FizzBuzz 16 17 Fizz 19 Buzz Fizz 22 23 Fizz Buzz 26 Fizz 28 29 FizzBuzz 31 32 Fizz 34 Buzz Fizz 37 38 Fizz Buzz 41 Fizz 43 44 FizzBuzz 46 47 Fizz 49 Buzz Fizz 52 53 Fizz Buzz 56 Fizz 58 59 FizzBuzz 61 62 Fizz 64 Buzz Fizz 67 68 Fizz Buzz 71 Fizz 73 74 FizzBuzz 76 77 Fizz 79 Buzz Fizz 82 83 Fizz Buzz 86 Fizz 88 89 FizzBuzz 91 92 Fizz 94 Buzz Fizz 97 98 Fizz Buzz "
KTHXBYE
| LOLCODE | 3 | nihirgupta/Hacktoberfest-2020-FizzBuzz | LOLCODE/LOLcode_hard.lol | [
"Unlicense"
] |
474
0 0
1 -0.2
2 -0.2
3 0
4 -0.2
5 -0.2
6 0
7 -0.2
8 -0.2
9 0
10 -0.2
11 -0.2
12 0
13 -0.2
14 -0.2
15 0
16 -0.2
17 -0.2
18 0
19 -0.2
20 -0.2
21 0
22 -0.2
23 -0.2
24 0
25 -0.2
26 -0.2
27 0
28 -0.2
29 -0.2
30 0
31 -0.2
32 -0.2
33 0
34 -0.2
35 -0.2
36 0
37 -0.2
38 -0.2
39 0
40 -0.2
41 -0.2
42 0
43 -0.2
44 -0.2
45 0
46 -0.2
47 -0.2
48 0
49 -0.2
50 -0.2
51 0
52 -0.2
53 -0.2
54 0
55 -0.2
56 -0.2
57 0
58 -0.2
59 -0.2
60 0
61 -0.2
62 -0.2
63 0
64 -0.2
65 -0.2
66 0
67 -0.2
68 -0.2
69 0
70 -0.2
71 -0.2
72 0
73 -0.2
74 -0.2
75 0
76 -0.2
77 -0.2
78 0
79 -0.2
80 -0.2
81 0
82 -0.2
83 -0.2
84 0
85 -0.2
86 -0.2
87 0
88 -0.2
89 -0.2
90 0
91 -0.2
92 -0.2
93 0
94 -0.2
95 -0.2
96 0
97 -0.2
98 -0.2
99 0
100 -0.2
101 -0.2
102 0
103 -0.2
104 -0.2
105 0
106 -0.2
107 -0.2
108 0
109 -0.2
110 -0.2
111 0
112 -0.2
113 -0.2
114 0
115 -0.2
116 -0.2
117 0
118 -0.2
119 -0.2
120 0
121 -0.2
122 -0.2
123 0
124 -0.2
125 -0.2
126 0
127 -0.2
128 -0.2
129 0
130 -0.2
131 -0.2
132 0
133 -0.2
134 -0.2
135 0
136 -0.2
137 -0.2
138 0
139 -0.2
140 -0.2
141 0
142 -0.2
143 -0.2
144 0
145 -0.2
146 -0.2
147 0
148 -0.2
149 -0.2
150 0
151 -0.2
152 -0.2
153 0
154 -0.2
155 -0.2
156 0
157 -0.2
158 -0.2
159 0
160 -0.2
161 -0.2
162 0
163 -0.2
164 -0.2
165 0
166 -0.2
167 -0.2
168 0
169 -0.2
170 -0.2
171 0
172 -0.2
173 -0.2
174 0
175 -0.2
176 -0.2
177 0
178 -0.2
179 -0.2
180 0
181 -0.2
182 -0.2
183 0
184 -0.2
185 -0.2
186 0
187 -0.2
188 -0.2
189 0
190 -0.2
191 -0.2
192 0
193 -0.2
194 -0.2
195 0
196 -0.2
197 -0.2
198 0
199 -0.2
200 -0.2
201 0
202 -0.2
203 -0.2
204 0
205 -0.2
206 -0.2
207 0
208 -0.2
209 -0.2
210 0
211 -0.2
212 -0.2
213 0
214 -0.2
215 -0.2
216 0
217 -0.2
218 -0.2
219 0
220 -0.2
221 -0.2
222 0
223 -0.2
224 -0.2
225 0
226 -0.2
227 -0.2
228 0
229 -0.2
230 -0.2
231 0
232 -0.2
233 -0.2
234 0
235 -0.2
236 -0.2
237 0
238 -0.2
239 -0.2
240 0
241 -0.2
242 -0.2
243 0
244 -0.2
245 -0.2
246 0
247 -0.2
248 -0.2
249 0
250 -0.2
251 -0.2
252 0
253 -0.2
254 -0.2
255 0
256 -0.2
257 -0.2
258 0
259 -0.2
260 -0.2
261 0
262 -0.2
263 -0.2
264 0
265 -0.2
266 -0.2
267 0
268 -0.2
269 -0.2
270 0
271 -0.2
272 -0.2
273 0
274 -0.2
275 -0.2
276 0
277 -0.2
278 -0.2
279 0
280 -0.2
281 -0.2
282 0
283 -0.2
284 -0.2
285 0
286 -0.2
287 -0.2
288 0
289 -0.2
290 -0.2
291 0
292 -0.2
293 -0.2
294 0
295 -0.2
296 -0.2
297 0
298 -0.2
299 -0.2
300 0
301 -0.2
302 -0.2
303 0
304 -0.2
305 -0.2
306 0
307 -0.2
308 -0.2
309 0
310 -0.2
311 -0.2
312 0
313 -0.2
314 -0.2
315 0
316 -0.2
317 -0.2
318 0
319 -0.2
320 -0.2
321 0
322 -0.2
323 -0.2
324 0
325 -0.2
326 -0.2
327 0
328 -0.2
329 -0.2
330 0
331 -0.2
332 -0.2
333 0
334 -0.2
335 -0.2
336 0
337 -0.2
338 -0.2
339 0
340 -0.2
341 -0.2
342 0
343 -0.2
344 -0.2
345 0
346 -0.2
347 -0.2
348 0
349 -0.2
350 -0.2
351 0
352 -0.2
353 -0.2
354 0
355 -0.2
356 -0.2
357 0
358 -0.2
359 -0.2
360 0
361 -0.2
362 -0.2
363 0
364 -0.2
365 -0.2
366 0
367 -0.2
368 -0.2
369 0
370 -0.2
371 -0.2
372 0
373 -0.2
374 -0.2
375 0
376 -0.2
377 -0.2
378 0
379 -0.2
380 -0.2
381 0
382 -0.2
383 -0.2
384 0
385 -0.2
386 -0.2
387 0
388 -0.2
389 -0.2
390 0
391 -0.2
392 -0.2
393 0
394 -0.2
395 -0.2
396 0
397 -0.2
398 -0.2
399 0
400 -0.2
401 -0.2
402 0
403 -0.2
404 -0.2
405 0
406 -0.2
407 -0.2
408 0
409 -0.2
410 -0.2
411 0
412 -0.2
413 -0.2
414 0
415 -0.2
416 -0.2
417 0
418 -0.2
419 -0.2
420 0
421 -0.2
422 -0.2
423 0
424 -0.2
425 -0.2
426 0
427 -0.2
428 -0.2
429 0
430 -0.2
431 -0.2
432 0
433 -0.2
434 -0.2
435 0
436 -0.2
437 -0.2
438 0
439 -0.2
440 -0.2
441 0
442 -0.2
443 -0.2
444 0
445 -0.2
446 -0.2
447 0
448 -0.2
449 -0.2
450 0
451 -0.2
452 -0.2
453 0
454 -0.2
455 -0.2
456 0
457 -0.2
458 -0.2
459 0
460 -0.2
461 -0.2
462 0
463 -0.2
464 -0.2
465 0
466 -0.2
467 -0.2
468 0
469 -0.2
470 -0.2
471 0
472 -0.2
473 -0.2
| IDL | 0 | ricortiz/OpenTissue | demos/data/dlm/474/lo.dlm | [
"Zlib"
] |
input {
padding: .5rem;
}
| CSS | 1 | John-Cassidy/angular | aio/content/examples/toh-pt4/src/app/hero-detail/hero-detail.component.css | [
"MIT"
] |
{-
Types Summer School 2007
Bertinoro
Aug 19 - 31, 2007
Agda
Ulf Norell
-}
module Parity where
open import Nat
-- Parity n tells us whether n is even or odd.
data Parity : Nat -> Set where
even : (k : Nat) -> Parity (2 * k)
odd : (k : Nat) -> Parity (2 * k + 1)
-- Every number is either even or odd.
parity : (n : Nat) -> Parity n
parity zero = even zero
parity (suc n) with parity n
parity (suc .(2 * k)) | even k = {! !}
parity (suc .(2 * k + 1)) | odd k = {! !}
half : Nat -> Nat
half n with parity n
half .(2 * k) | even k = k
half .(2 * k + 1) | odd k = k
| Agda | 5 | cruhland/agda | examples/SummerSchool07/Lecture/Parity.agda | [
"MIT"
] |
/*
* Copyright 2010-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef RUNTIME_NATIVES_H
#define RUNTIME_NATIVES_H
#include "Types.h"
#include "Exceptions.h"
#include "Memory.h"
constexpr size_t alignUp(size_t size, size_t alignment) {
return (size + alignment - 1) & ~(alignment - 1);
}
template <typename T>
inline T* AddressOfElementAt(ArrayHeader* obj, KInt index) {
int8_t* body = reinterpret_cast<int8_t*>(obj) + alignUp(sizeof(ArrayHeader), alignof(T));
return reinterpret_cast<T*>(body) + index;
}
template <typename T>
inline const T* AddressOfElementAt(const ArrayHeader* obj, KInt index) {
const int8_t* body = reinterpret_cast<const int8_t*>(obj) + alignUp(sizeof(ArrayHeader), alignof(T));
return reinterpret_cast<const T*>(body) + index;
}
// Optimized versions not accessing type info.
// The element type is fixed at each call site below, so these wrappers can
// delegate straight to AddressOfElementAt<T> without consulting the array's
// runtime type information.  None of them perform bounds checking.
inline KByte* ByteArrayAddressOfElementAt(ArrayHeader* obj, KInt index) {
  return AddressOfElementAt<KByte>(obj, index);
}

inline const KByte* ByteArrayAddressOfElementAt(const ArrayHeader* obj, KInt index) {
  return AddressOfElementAt<KByte>(obj, index);
}

inline KChar* CharArrayAddressOfElementAt(ArrayHeader* obj, KInt index) {
  return AddressOfElementAt<KChar>(obj, index);
}

inline const KChar* CharArrayAddressOfElementAt(const ArrayHeader* obj, KInt index) {
  return AddressOfElementAt<KChar>(obj, index);
}

inline KInt* IntArrayAddressOfElementAt(ArrayHeader* obj, KInt index) {
  return AddressOfElementAt<KInt>(obj, index);
}

inline const KInt* IntArrayAddressOfElementAt(const ArrayHeader* obj, KInt index) {
  return AddressOfElementAt<KInt>(obj, index);
}

// Consider aligning of base to sizeof(T).
// Generic element accessors for arrays of an arbitrary primitive type T.
template <typename T>
inline T* PrimitiveArrayAddressOfElementAt(ArrayHeader* obj, KInt index) {
  return AddressOfElementAt<T>(obj, index);
}

template <typename T>
inline const T* PrimitiveArrayAddressOfElementAt(const ArrayHeader* obj, KInt index) {
  return AddressOfElementAt<T>(obj, index);
}

// Element accessors for arrays of object references (KRef).
inline KRef* ArrayAddressOfElementAt(ArrayHeader* obj, KInt index) {
  return AddressOfElementAt<KRef>(obj, index);
}

inline const KRef* ArrayAddressOfElementAt(const ArrayHeader* obj, KInt index) {
  return AddressOfElementAt<KRef>(obj, index);
}
#ifdef __cplusplus
extern "C" {
#endif

// Returns the runtime's shared empty-string object (OBJ_GETTER0 expands to
// the runtime's object-returning calling convention).
OBJ_GETTER0(TheEmptyString);

// NOTE(review): the implementation is not visible in this header; judging by
// the name this prints an empty line to the console -- confirm before relying
// on it.
void Kotlin_io_Console_println0();

// Element setter/getter for NativePtrArray.  These declarations alone do not
// specify bounds-checking behavior; see the implementations.
void Kotlin_NativePtrArray_set(KRef thiz, KInt index, KNativePtr value);
KNativePtr Kotlin_NativePtrArray_get(KConstRef thiz, KInt index);

#ifdef __cplusplus
}
#endif
#endif // RUNTIME_NATIVES_H
| C | 3 | Mu-L/kotlin | kotlin-native/runtime/src/main/cpp/Natives.h | [
"ECL-2.0",
"Apache-2.0"
] |
h=",1,7,10,13,19,23,28,31,32,44,49,68,70,79,82,86,91,94,97,100,103,10"
h+="9,129,130,133,139,167,176,188,190,192,193,203,208,219,226,230,236"
h+=",239," j=10k=100l=1000
a=:i%j b=:i%k/j-a/j c=:i%l/k-:i/k%1d=:i/l d-=d%1 i=a*a+b*b+c*c+d
:o="Sad" if h-(","+i+",")!=h then:o="Happy"end :done=1goto4
/--------//--------//--------//--------//--------//--------//--------/
h=","s=",145,"e=","j=10k=100l=k*j
a=:i%j b=:i%k/j-a/j c=:i%l/k-:i/k%1d=:i/l d-=d%1:i=a*a+b*b+c*c+d*d
n=e+:i+e-0f+=:i+e p=s-n==s x/=(h-n==h)*(:i>1)*p goto2
ifp thenh+=f:o="Happy"else s+=f:o="Sad"end f=",":done=1goto2
d=:i/l-:i/l%1
d=:i/l d-=d%1
/--------//--------//--------//--------//--------//--------//--------/
h=","s=",145,"e=","j=10k=100l=k*j m=l*j y=1
i=:i goto3
a=i%j b=i%k/j c=i%l/k d=i%m/l d-=c/j c-=b/j b-=a/j i=a*a+b*b+c*c+d*d
n=e+i+e-0f+=i+e p=s-n==s x/=(h-n==h)*(i>1)*p goto3
if p then h+=f:o="Happy"else s+=f:o="Sad"end f=",":done=1goto2
/--------//--------//--------//--------//--------//--------//--------/
h=","s=",145,"e=","j=10k=100l=k*j m=l*j
i=:i f=i+e
a=i%j b=i%k/j c=i%l/k d=i%m/l d-=c/j c-=b/j b-=a/j i=a*a+b*b+c*c+d*d
n=e+i+e f+=i+e p=s-n==s x/=(h-n==h)*(i>1)*p goto3
if p then h+=f:o="Happy"else s+=f:o="Sad"end f=",":done=1goto2
a=digit4
b=digit3
c=digit2
d=digit1
e=","
f=numbers so far in this sequence
h=known happy numbers
i=input number
j=10
k=100
l=1000
m=10000
n=",current_number,"
o=is_happy
p=is_sad
q=is end of sequence (i.e. is happy)
s=known sad numbers
/--------//--------//--------//--------//--------//--------//--------/
j=10 k=100 l=1000 m=10000
i=:i
a=i%j b=i%k/j c=i%l/k d=i%m/l d-=c/j c-=b/j b-=a/j
a=i%j i=(i-a)/j b=i%j i=i/j-a/j c=i%j i=i/j-a/j d=i%j i=i/j-a/j
a=i%j b=i%k/j c=i%l/k d=i%m/l d-=c/j c-=b/j b-=a/j
i-=a i/=j
i=(i-a)/j
i=i/j-a/j
i/=j i-=a/10
/--------//--------//--------//--------//--------//--------//--------/
i=:i j=10k=100l=k*j m=l*j n=",4,16,37,58,89,145,42,20,"o=","s="Sad"
a=i%j b=i%k/j c=i%l/k d=i%m/l d-=c/j c-=b/j b-=a/j i=a*a+b*b+c*c+d*d
p=(n-(o+i+o))!=n:o="Happy"q=i<2r=p+q if p then:o=s end:done=r goto2-r
i=i/z*y
a=i%10i=i/z*y
i/=z i*=y
a=i%w i=i/w-i%1
a=i%10i=(a-a%10)/10
/--------//--------//--------//--------//--------//--------//--------/
a=",1,7,10,13,19,23,28,31,32,44,49,68,70,79,82,86,91,94,97,100,103,10"
a+="9,129,130,133,139,167,176,188,190,192,193,203,208,219,226,230,236"
a+=",239,262,263,280,291,293,301,302,310,313,319,320,326,329,331,338,"
a+="356,362,365,367,368,376,379,383,386,391,392,397,404,409,440,446,4"
a+="64,469,478,487,490,496,536,556,563,565,566,608,617,622,623,632,63"
a+="5,637,638,644,649,653,655,656,665,671,673,680,683,694,700,709,716"
a+=",736,739,748,761,763,784,790,793,802,806,818,820,833,836,847,860,"
a+="863,874,881,888,899,901,904,907,910,912,913,921,923,931,932,937,9"
a+="40,946,964,970,973,989,998,1000,1003,1009,1029,1030,1033,1039,106"
a+="7,1076,1088,1090,1092,1093,1112,1114,1115,1121,1122,1125,1128,114"
a+="1,1148,1151,1152,1158,1177,1182,1184,1185,1188,1209,1211,1212,121"
a+="5,1218,1221,1222,1233,1247,1251,1257,1258,1274,1275,1277,1281,128"
a+="5,1288,1290,1299,1300,1303,1309,1323,1330,1332,1333,1335,1337,133"
a+="9,1353,1366,1373,1390,1393,1411,1418,1427,1444,1447,1448,1457,147"
a+="2,1474,1475,1478,1481,1484,1487,1511,1512,1518,1521,1527,1528,153"
a+="3,1547,1557,1572,1574,1575,1578,1581,1582,1587,1599,1607,1636,166"
a+="3,1666,1670,1679,1697,1706,1717,1724,1725,1727,1733,1742,1744,174"
a+="5,1748,1752,1754,1755,1758,1760,1769,1771,1772,1784,1785,1796,180"
a+="8,1812,1814,1815,1818,1821,1825,1828,1841,1844,1847,1851,1852,185"
a+="7,1874,1875,1880,1881,1882,1888,1900,1902,1903,1920,1929,1930,193"
a+="3,1959,1967,1976,1992,1995,":o="Sad"ifa-(","+:i+",")!=athen:o="Happy"end done=1goto21
/--------//--------//--------//--------//--------//--------//--------/ | LOLCODE | 2 | Dude112113/Yolol | YololEmulator/Scripts/HappyNumbers.lol | [
"MIT"
] |
%{
/*
* wc.lex : A simple example of using FLEX
* to create a wc-like utility.
*
* See MISC/fastwc/ in the flex distribution for examples
* of how to write this scanner for maximum performance.
*/
int numchars = 0;
int numwords = 0;
int numlines = 0;
int totchars = 0;
int totwords = 0;
int totlines = 0;
/*
* rules start from here
*/
%}
%%
[\n] { numchars++; numlines++; }
[\r] { numchars++; }
[^ \t\n]+ { numwords++; numchars += yyleng; }
. { numchars++; }
%%
/*
* additional C code start from here. This supplies
* all the argument processing etc.
*/
/*
 * Entry point of the wc-like utility.  Scans the command line for the
 * -l / -w / -c options, then runs the flex scanner over every file named on
 * the command line, printing the selected per-file counts followed by a
 * grand total.  Returns 0 on success; exits with status 1 on usage error.
 */
int main(int argc, char *argv[])
{
	int loop,first=1;
	int lflag = 0;   /* 1 if we count # of lines      */
	int wflag = 0;   /* 1 if we count # of words      */
	int cflag = 0;   /* 1 if we count # of characters */
	int fflag = 0;   /* 1 if we have a file name      */

	/* First pass over the arguments: collect option flags.
	 * NOTE(review): only argv[loop][1] is examined, so a combined option
	 * such as "-lw" enables just 'l' -- confirm this is intended. */
	for(loop=1; loop<argc; loop++){
		if(argv[loop][0] == '-'){
			switch(argv[loop][1]){
			case 'l':
				lflag = 1;
				break;
			case 'w':
				wflag = 1;
				break;
			case 'c':
				cflag = 1;
				break;
			default:
				fprintf(stderr,"unknown option -%c\n",
						argv[loop][1]);
			}
		}
	}

	if(lflag == 0 && wflag == 0 && cflag == 0){
		lflag = wflag = cflag = 1;   /* default to all on */
	}

	/* Second pass: every non-option argument is a file to scan. */
	for(loop=1; loop<argc; loop++){
		if(argv[loop][0] != '-'){
			fflag = 1;
			/* Per-file counters; the scanner rules increment these. */
			numlines = numchars = numwords = 0;

			if((yyin = fopen(argv[loop],"rb")) != NULL){
				if(first){
					first = 0;
				} else {
					/* Tell flex to restart scanning on the newly
					 * opened yyin after the first file. */
					YY_NEW_FILE;
				}
				(void) yylex();
				fclose(yyin);

				/* Accumulate the grand totals printed at the end. */
				totwords += numwords;
				totchars += numchars;
				totlines += numlines;

				printf("file : %25s :",argv[loop]) ;
				if(lflag){
					fprintf(stdout,"lines %5d ",numlines);
				}
				if(cflag){
					fprintf(stdout,"characters %5d ",numchars);
				}
				if(wflag){
					fprintf(stdout,"words %5d ",numwords);
				}
				fprintf(stdout,"\n");
			}else{
				fprintf(stderr,"wc : file not found %s\n",argv[loop]);
			}
		}
	}

	/* No file arguments at all: print usage and fail. */
	if(!fflag){
		fprintf(stderr,"usage : wc [-l -w -c] file [file...]\n");
		fprintf(stderr,"-l = count lines\n");
		fprintf(stderr,"-c = count characters\n");
		fprintf(stderr,"-w = count words\n");
		exit(1);
	}

	/* Separator line, then the totals in the same column layout. */
	for(loop=0;loop<79; loop++){
		fprintf(stdout,"-");
	}
	fprintf(stdout,"\n");
	fprintf(stdout,"total : %25s ","") ;
	if(lflag){
		fprintf(stdout,"lines %5d ",totlines);
	}
	if(cflag){
		fprintf(stdout,"characters %5d ",totchars);
	}
	if(wflag){
		fprintf(stdout,"words %5d ",totwords);
	}
	fprintf(stdout,"\n");
	return(0);
}
| Lex | 5 | DemiMarie/flex | examples/manual/wc.lex | [
"BSD-4-Clause-UC"
] |
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#if !V8_ENABLE_WEBASSEMBLY
#error This header should only be included if WebAssembly is enabled.
#endif // !V8_ENABLE_WEBASSEMBLY
#ifndef V8_WASM_WASM_TIER_H_
#define V8_WASM_WASM_TIER_H_
#include <cstdint>
namespace v8 {
namespace internal {
namespace wasm {
// All the tiers of Wasm execution.
enum class ExecutionTier : int8_t {
  kNone,
  kLiftoff,
  kTurbofan,
};

// Human-readable name for an execution tier (e.g. for tracing output).
// The switch is exhaustive over ExecutionTier, so every input has a case.
inline const char* ExecutionTierToString(ExecutionTier tier) {
  switch (tier) {
    case ExecutionTier::kNone: return "none";
    case ExecutionTier::kLiftoff: return "liftoff";
    case ExecutionTier::kTurbofan: return "turbofan";
  }
}
// Debugging state of a piece of code.
// {kForDebugging} is used for default tiered-down code, {kWithBreakpoints} if
// the code also contains breakpoints, and {kForStepping} for code that is
// flooded with breakpoints.
enum ForDebugging : int8_t {
  kNoDebugging = 0,
  kForDebugging,
  kWithBreakpoints,
  kForStepping
};

// Whether code is currently tiered up or tiered down.
enum TieringState : int8_t { kTieredUp, kTieredDown };
} // namespace wasm
} // namespace internal
} // namespace v8
#endif // V8_WASM_WASM_TIER_H_
| C | 4 | EXHades/v8 | src/wasm/wasm-tier.h | [
"BSD-3-Clause"
] |
%%%
%%% Authors:
%%% Kevin Glynn <glynn@info.ucl.ac.be>
%%%
%%% Copyright:
%%% Kevin Glynn, 2003
%%%
%%% Last change:
%%% $Date$ by $Author$
%%% $Revision$
%%%
%%% This file is part of Mozart, an implementation
%%% of Oz 3
%%% http://www.mozart-oz.org
%%%
%%% See the file "LICENSE" or
%%% http://www.mozart-oz.org/LICENSE.html
%%% for information on usage and redistribution
%%% of this file, and for a DISCLAIMER OF ALL
%%% WARRANTIES.
%%%
%%% Random tests of the Uniform State Syntax
functor
export Return
define
Return =
state([
'class'(proc {$}
C = {NewCell 0}
class MyClass
attr a la
d cd
meth init()
a <- 0
end
meth manip()
C := @C + 1 % Increments contents of cell C
a := @a + 1 % Increments contents of attr A
a := @C % contents of attr A set to content of C
@C = 1
C := a % contents of C is name of cell A
a := @@C + 1 % (indirectly) increments contents of attr A
C := @a % update C with current content of A
a := C % contents of A is name of cell C
C := @@a + 1 % (indirectly) increments contents of cell C
la := a := C := 7
end
meth dict_manip()
d := {NewDictionary}
@d.k := 32 % Update dictionary in attr
cd := d % assign attr name d to cd
@@cd#j := 64 % indirectly update dict
64 = @@cd.j := 96
96 = @@cd#j := 128
end
meth test()
@a = 3
@@la = 7
@C=7
@(@d#j) = 128
(@@cd.k) = 32
end
end
M = {New MyClass init()}
in
{M manip()}
{M dict_manip()}
{M test()}
end
keys:[state syntax 'class' object])
dict(proc {$}
A={NewArray 0 50 ~1}
D={NewDictionary}
in
D.3 := 5
A.3 := 5
(if 5 < A.3 then D else A end).4 := 2
A.4 = @(D#3)-3
end
keys:[state syntax dictionary array])
cell(proc {$}
C = {NewCell ~1} V
in
V = C := 3
@C = V+4
end
keys:[state syntax cell])
])
end
| Oz | 5 | Ahzed11/mozart2 | platform-test/base/state.oz | [
"BSD-2-Clause"
] |
using Ryujinx.Graphics.Shader.IntermediateRepresentation;

namespace Ryujinx.Graphics.Shader.StructuredIr
{
    /// <summary>
    /// Pairs a branch operation with the label assignment it jumps to,
    /// as used by the structured IR goto-elimination pass.
    /// </summary>
    class GotoStatement
    {
        /// <summary>The branch operation performing the goto.</summary>
        public AstOperation Goto { get; }

        /// <summary>Assignment that defines the label targeted by the branch.</summary>
        public AstAssignment Label { get; }

        /// <summary>Set when this goto has been turned into a loop.</summary>
        public bool IsLoop { get; set; }

        /// <summary>Condition guarding the branch (the label's destination node).</summary>
        public IAstNode Condition
        {
            get { return Label.Destination; }
        }

        /// <summary>True when the branch is a plain, unguarded jump.</summary>
        public bool IsUnconditional
        {
            get { return Goto.Inst == Instruction.Branch; }
        }

        public GotoStatement(AstOperation branch, AstAssignment label, bool isLoop)
        {
            Goto = branch;
            Label = label;
            IsLoop = isLoop;
        }
    }
}
"MIT"
] |
mes 2,2,4
exp $etext
pro $etext,0
end 0
| Eiffel | 1 | wyan/ack | mach/em24/libend/etext.e | [
"BSD-3-Clause"
] |
-- Schema for a small blog application with role-based users (PostgreSQL,
-- pg_dump-style DDL).  Tables: article, the Knex.js migration bookkeeping
-- pair, role, "user", and the user_role join table.

-- An article; every article belongs to a user (see FK at the bottom).
CREATE TABLE public.article (
    id integer NOT NULL,
    title text NOT NULL,
    content text NOT NULL,
    created_at timestamp with time zone DEFAULT now() NOT NULL,
    user_id uuid NOT NULL
);

-- id sequence for article; OWNED BY ties its lifetime to the column.
CREATE SEQUENCE public.articles_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;

ALTER SEQUENCE public.articles_id_seq OWNED BY public.article.id;

-- Bookkeeping table maintained by the Knex.js migration runner.
CREATE TABLE public.knex_migrations (
    id integer NOT NULL,
    name character varying(255),
    batch integer,
    migration_time timestamp with time zone
);

CREATE SEQUENCE public.knex_migrations_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;

ALTER SEQUENCE public.knex_migrations_id_seq OWNED BY public.knex_migrations.id;

-- Lock table used by Knex to keep two migration runs from racing.
CREATE TABLE public.knex_migrations_lock (
    index integer NOT NULL,
    is_locked integer
);

CREATE SEQUENCE public.knex_migrations_lock_index_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;

ALTER SEQUENCE public.knex_migrations_lock_index_seq OWNED BY public.knex_migrations_lock.index;

-- A role that can be granted to users; role names are unique.
CREATE TABLE public.role (
    id uuid DEFAULT public.gen_random_uuid() NOT NULL,
    name character varying(255) NOT NULL
);

-- Application user; the table name is quoted because "user" is reserved.
CREATE TABLE public."user" (
    id uuid DEFAULT public.gen_random_uuid() NOT NULL,
    username character varying(255) NOT NULL,
    password character varying(255) NOT NULL,
    created_at timestamp with time zone DEFAULT now() NOT NULL,
    active boolean DEFAULT true
);

-- Many-to-many join between "user" and role.
CREATE TABLE public.user_role (
    id uuid DEFAULT public.gen_random_uuid() NOT NULL,
    role_id uuid,
    user_id uuid
);

-- Attach the sequences as column defaults (serial-style integer ids).
ALTER TABLE ONLY public.article ALTER COLUMN id SET DEFAULT nextval('public.articles_id_seq'::regclass);
ALTER TABLE ONLY public.knex_migrations ALTER COLUMN id SET DEFAULT nextval('public.knex_migrations_id_seq'::regclass);
ALTER TABLE ONLY public.knex_migrations_lock ALTER COLUMN index SET DEFAULT nextval('public.knex_migrations_lock_index_seq'::regclass);

-- Primary keys and uniqueness constraints.
ALTER TABLE ONLY public.article
    ADD CONSTRAINT articles_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.knex_migrations_lock
    ADD CONSTRAINT knex_migrations_lock_pkey PRIMARY KEY (index);
ALTER TABLE ONLY public.knex_migrations
    ADD CONSTRAINT knex_migrations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.role
    ADD CONSTRAINT role_id_unique UNIQUE (id);
ALTER TABLE ONLY public.role
    ADD CONSTRAINT role_name_unique UNIQUE (name);
ALTER TABLE ONLY public.role
    ADD CONSTRAINT role_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public."user"
    ADD CONSTRAINT user_id_unique UNIQUE (id);
ALTER TABLE ONLY public."user"
    ADD CONSTRAINT user_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.user_role
    ADD CONSTRAINT user_role_id_unique UNIQUE (id);
ALTER TABLE ONLY public.user_role
    ADD CONSTRAINT user_role_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public."user"
    ADD CONSTRAINT user_username_unique UNIQUE (username);

-- Secondary indexes for the common lookups.
CREATE INDEX user_active_index ON public."user" USING btree (active);
CREATE INDEX user_role_role_id_index ON public.user_role USING btree (role_id);
CREATE INDEX user_role_user_id_index ON public.user_role USING btree (user_id);

-- Foreign keys.
ALTER TABLE ONLY public.article
    ADD CONSTRAINT articles_user_id_fkey FOREIGN KEY (user_id) REFERENCES public."user"(id);
ALTER TABLE ONLY public.user_role
    ADD CONSTRAINT user_role_role_id_foreign FOREIGN KEY (role_id) REFERENCES public.role(id);
ALTER TABLE ONLY public.user_role
    ADD CONSTRAINT user_role_user_id_foreign FOREIGN KEY (user_id) REFERENCES public."user"(id);
"Apache-2.0",
"MIT"
] |
#tag Class
Protected Class App
Inherits ConsoleApplication
#tag Event
Function Run(args() as String) As Integer
dim errCode as integer = kErrorNoError
RegisterSubApps
dim caller as string = args( 0 )
dim parser as new OptionParser( "kaju", "Administer Kaju files" )
dim o as new Option( "", kOptionFile, "The admin file", Option.OptionType.File )
parser.AddOption o
dim appArgs() as string
appArgs.Append caller
for i as integer = 1 to args.Ubound
dim arg as string = args( i )
if SubAppDictionary.HasKey( arg ) then
exit for i
else
appArgs.Append arg
end if
next
parser.Parse appArgs
if args.Ubound = 0 or parser.HelpRequested then
PrintMainHelp parser
else
//
// Adjust the args
//
for i as integer = 0 to appArgs.Ubound
args.Remove 0
next
//
// The first argument must be the subapp name
//
dim subappKey as string = args( 0 )
dim subapp as SubApplication = SubAppDictionary.Value( subappKey )
//
// Substitute the caller for the subapp name
//
args( 0 ) = caller
//
// Get the options
//
dim subappParser as new OptionParser( subappKey, "" )
subapp.AddOptions subappParser
subappParser.Parse args
if subappParser.HelpRequested then
PrintSubAppHelp( subappKey, subapp, subappParser )
else
//
// Get the admin file
//
dim adminFile as FolderItem = parser.FileValue( kOptionFile )
if adminFile is nil then
print "No admin file specified"
errCode = kErrorGeneralError
else
try
errCode = subapp.Execute( adminFile, subappParser )
catch err as RuntimeException
if err isa EndException or err isa ThreadEndException then
raise err
end if
print "Unexpected error: " + err.Message
errCode = kErrorGeneralError
end try
end if
end if
end if
#if DebugBuild and TargetRemoteDebugger then
print "Press return to continue..."
call input
#endif
return errCode
End Function
#tag EndEvent
#tag Method, Flags = &h21
Private Function PadRight(s As String, width As Integer) As String
if s.Len >= width then
return s
end if
static padder as string = " "
while padder.Len < width
padder = padder + padder
wend
s = s + padder.Left( width - s.Len )
return s
End Function
#tag EndMethod
#tag Method, Flags = &h21
Private Sub PrintMainHelp(parser As OptionParser)
// Have to get the names and descriptions of each of the registered subapps
dim subAppNameMaxLen as integer = 0
dim subAppNames() as string
dim subAppDescriptions() as string
for each k as variant in SubAppDictionary.Keys
dim subApp as SubApplication = SubAppDictionary.Value( k )
if k.StringValue.Len > subAppNameMaxLen then
subAppNameMaxLen = k.StringValue.Len
end if
subAppNames.Append k.StringValue
dim thisDescription as string = subApp.Description
subAppDescriptions.Append thisDescription
next
subAppNames.SortWith subAppDescriptions
print LongVersion
print ""
print "Usage: " + kAppName + " [global-parameters] app-key [parameters]"
print ""
if parser isa OptionParser then
parser.ShowHelp( "Global parameters" )
print ""
end if
print "Where app-key is:"
for i as integer = 0 to subAppNames.Ubound
print " " + PadRight( subAppNames( i ), subAppNameMaxLen ) + " - " + subAppDescriptions( i )
next
print ""
print "For help on a given application, use:"
print " " + kAppName + " app-key --help"
print ""
End Sub
#tag EndMethod
#tag Method, Flags = &h21
Private Sub PrintSubAppHelp(subAppName As String, subApp As SubApplication, options As OptionParser)
dim subAppUsage as string = subApp.Usage
if subAppUsage = "" then
if options.ExtrasRequired > 0 then
for i as integer = 1 to options.ExtrasRequired
subAppUsage = subAppUsage + " /path/to/file" + Str( i )
next i
subAppUsage = subAppUsage + EndOfLine + " [/path/to/file ...]"
end if
end if
print LongVersion
print ""
print "Usage:"
print " " + kAppName + " --file /path/to/admin/file " + _
subAppName + " [parameters]" + if( subAppUsage <> "", " " + subAppUsage.Trim, "" )
if options <> nil then
options.AdditionalHelpNotes = subApp.AdditionalHelp
print ""
print "Help for:"
options.ShowHelp()
end if
End Sub
#tag EndMethod
#tag Method, Flags = &h21
Private Sub RegisterSubApps()
SubAppDictionary = new Dictionary
SubAppDictionary.Value( kAppCreate ) = new CreateApp
SubAppDictionary.Value( kAppGetKey ) = new GetKeyApp
SubAppDictionary.Value( kAppExport ) = new ExportApp
SubAppDictionary.Value( kAppListVersions ) = new ListVersionsApp
SubAppDictionary.Value( kAppAddVersion ) = new AddVersionApp
SubAppDictionary.Value( kAppDeleteVersion ) = new DeleteVersionApp
SubAppDictionary.Value( kAppVersionInfo ) = new VersionInfoApp
SubAppDictionary.Value( kAppEditVersion ) = new EditVersionApp
End Sub
#tag EndMethod
#tag Property, Flags = &h21
Private SubAppDictionary As Dictionary
#tag EndProperty
#tag Constant, Name = kAppAddVersion, Type = String, Dynamic = False, Default = \"addversion", Scope = Private
#tag EndConstant
#tag Constant, Name = kAppCreate, Type = String, Dynamic = False, Default = \"create", Scope = Private
#tag EndConstant
#tag Constant, Name = kAppDeleteVersion, Type = String, Dynamic = False, Default = \"deleteversion", Scope = Private
#tag EndConstant
#tag Constant, Name = kAppEditVersion, Type = String, Dynamic = False, Default = \"editversion", Scope = Private
#tag EndConstant
#tag Constant, Name = kAppExport, Type = String, Dynamic = False, Default = \"export", Scope = Private
#tag EndConstant
#tag Constant, Name = kAppGetKey, Type = String, Dynamic = False, Default = \"getkey", Scope = Private
#tag EndConstant
#tag Constant, Name = kAppListVersions, Type = String, Dynamic = False, Default = \"listversions", Scope = Private
#tag EndConstant
#tag Constant, Name = kAppName, Type = String, Dynamic = False, Default = \"", Scope = Public
#Tag Instance, Platform = Mac OS, Language = Default, Definition = \"kaju"
#Tag Instance, Platform = Linux, Language = Default, Definition = \"kaju"
#Tag Instance, Platform = Windows, Language = Default, Definition = \"kaju.exe"
#tag EndConstant
#tag Constant, Name = kAppVersionInfo, Type = String, Dynamic = False, Default = \"versioninfo", Scope = Private
#tag EndConstant
#tag Constant, Name = kErrorGeneralError, Type = Double, Dynamic = False, Default = \"1", Scope = Public
#tag EndConstant
#tag Constant, Name = kErrorNoError, Type = Double, Dynamic = False, Default = \"0", Scope = Public
#tag EndConstant
#tag Constant, Name = kOptionFile, Type = String, Dynamic = False, Default = \"file", Scope = Private
#tag EndConstant
#tag ViewBehavior
#tag EndViewBehavior
End Class
#tag EndClass
| Xojo | 5 | joneisen/Kaju | Kaju Admin CLI/App.xojo_code | [
"MIT"
] |
% Example using TXL 10.5a source coordinate extensions to extract
% a table of all method definitions with source coordinates
% Jim Cordy, January 2008
% Revised Nov 2012 - remove @Override annotations from clone comparison - JRC
% Revised Aug 2012 - disallow ouput forms in input parse - JRC
% Revised July 2011 - ignore BOM headers in source
% Revised 25.03.11 - match constructors as methods - JRC
% Revised 30.04.08 - unmark embedded functions - JRC
% Using Java 5 grammar
include "Cpp.grm"
% Ignore BOM headers from Windows
include "bom.grm"
% Redefinitions to collect source coordinates for function definitions as parsed input,
% and to allow for XML markup of function definitions as output
% Modified to match constructors as well. Even though the grammar still
% has constructor_declaration in it, this one will match first. - JRC 25mar11
%define function_definition
% [NL] [opt decl_specifiers] [function_declarator] [opt ctor_initializer]
% [opt exception_specification] [function_body]
%end define
%define function_declarator
% % like [declarator], but requires a [declarator_extension]
% [repeat pointer_operator] [declared_item] [repeat declarator_extension+]
%end define
%define function_body
% [NL] [compound_statement] [opt ';] [NL]
% | 'try [opt ctor_initializer] [NL] [compound_statement] [opt ';] [NL] [handler_list]
%end define
%define compound_statement
% '{ [NL][IN]
% [opt statement_list] [EX]
% '} [NL]
%end define
redefine function_definition
% Input form
[srcfilename] [srclinenumber] % Keep track of starting file and line number
[NL] [opt decl_specifiers] [function_declarator] [opt ctor_initializer]
[opt exception_specification]
'{ [IN][NL]
[opt statement_list] [EX]
[srcfilename] [srclinenumber] % Keep track of ending file and line number
'}
% |
% Output form
% [not token] % disallow output form in input parse
% [opt xml_source_coordinate]
% [NL] [opt decl_specifiers] [function_declarator] [opt ctor_initializer]
% [opt exception_specification]
% '{ [IN][NL]
% [opt statement_list] [EX]
% '}
% [opt end_xml_source_coordinate]
|
[srcfilename] [srclinenumber]
[NL] [opt decl_specifiers] [function_declarator] [opt ctor_initializer]
[opt exception_specification] [function_body]
[srcfilename] [srclinenumber]
|
[not token]
[opt xml_source_coordinate]
% [NL] [opt decl_specifiers] [function_declarator] [opt ctor_initializer]
% [opt exception_specification] [function_body]
% [opt end_xml_source_coordinate]
end redefine
define xml_source_coordinate
'< [SPOFF] 'source [SP] 'file=[stringlit] [SP] 'startline=[stringlit] [SP] 'endline=[stringlit] '> [SPON] [NL]
end define
define end_xml_source_coordinate
[NL] '< [SPOFF] '/ 'source '> [SPON] [NL]
end define
redefine program
...
| [repeat function_definition]
end redefine
% Main function - extract and mark up function definitions from parsed input program
function main
    % Replace the whole parsed program with the extracted, marked-up list of
    % its function definitions.
    replace [program]
        P [program]
    construct Functions [repeat function_definition]
        % [^ P] extracts every function definition from the program; the two
        % convert rules then swap each one's recorded source coordinates for
        % an XML <source> header (one rule per alternative input form of
        % [function_definition] in the redefine above).
        _ [^ P]                 % Extract all functions from program
          [convertFunctionDefinitions] [convertFunctionDefinitions2]    % Mark up with XML
    by
        Functions
end function
rule convertFunctionDefinitions
% Find each function definition and match its input source coordinates
replace [function_definition]
FileName [srcfilename] LineNumber [srclinenumber]
Decl [opt decl_specifiers] FunctionHeader [function_declarator] CtorInit[opt ctor_initializer]
Except [opt exception_specification]
'{
FunctionBody [opt statement_list]
EndFileName [srcfilename] EndLineNumber [srclinenumber]
'}
% Convert file name and line numbers to strings for XML
construct FileNameString [stringlit]
_ [quote FileName]
construct LineNumberString [stringlit]
_ [quote LineNumber]
construct EndLineNumberString [stringlit]
_ [quote EndLineNumber]
% Output is XML form with attributes indicating input source coordinates
construct XmlHeader [xml_source_coordinate]
<source file=FileNameString startline=LineNumberString endline=EndLineNumberString>
by
XmlHeader
% Decl
% FunctionHeader
% CtorInit
% Except
% '{
% FunctionBody [unmarkEmbeddedFunctionDefinitions]
% '}
% </source>
end rule
rule convertFunctionDefinitions2
% Find each function definition and match its input source coordinates
replace [function_definition]
FileName [srcfilename] LineNumber [srclinenumber]
Decl [opt decl_specifiers] FunctionHeader [function_declarator] CtorInit [opt ctor_initializer]
Except [opt exception_specification] FunctionBody [function_body]
EndFileName [srcfilename] EndLineNumber [srclinenumber]
% Convert file name and line numbers to strings for XML
construct FileNameString [stringlit]
_ [quote FileName]
construct LineNumberString [stringlit]
_ [quote LineNumber]
construct EndLineNumberString [stringlit]
_ [quote EndLineNumber]
% Output is XML form with attributes indicating input source coordinates
construct XmlHeader [xml_source_coordinate]
<source file=FileNameString startline=LineNumberString endline=EndLineNumberString>
by
XmlHeader
% Decl
% FunctionHeader
% CtorInit
% Except
% FunctionBody [unmarkEmbeddedFunctionDefinitions]
% </source>
end rule
%rule unmarkEmbeddedFunctionDefinitions
% replace [function_definition]
% FileName [srcfilename] LineNumber [srclinenumber]
% Decl [opt decl_specifiers] FunctionHeader [function_declarator] CtorInit[opt ctor_initializer]
% Except [opt exception_specification]
% '{
% FunctionBody [opt statement_list]
% EndFileName [srcfilename] EndLineNumber [srclinenumber]
% '}
% by
% Decl
% FunctionHeader
% CtorInit
% Except
% '{
% FunctionBody
% '}
%end rule
| TXL | 4 | coder-chenzhi/SQA | SourcererCC/parser/java/txl/cpp-extract-function-headers.txl | [
"Apache-2.0"
] |
n factorial
| Self | 0 | LaudateCorpus1/RosettaCodeData | Task/Factorial/Self/factorial-1.self | [
"Info-ZIP"
] |
### /product-sku/cal-price — calculate the price of a product SKU
GET {{shop-api-base-url}}/product-sku/cal-price?id=33
Content-Type: application/x-www-form-urlencoded
###
| HTTP | 3 | ssSlowDown/onemall | shop-web-app/src/main/java/cn/iocoder/mall/shopweb/controller/product/ProductSkuController.http | [
"MulanPSL-1.0"
] |
<?xml version="1.0" encoding="iso8859-5"?>
<!-- Encoding regression test: this stylesheet is itself encoded as
     iso8859-5, while xsl:output requests KOI8-R for the result document.
     The generated page writes document.characterSet so the web-test harness
     (testRunner, dumped as text) can verify which encoding the output uses.
     The script text below is emitted verbatim into the result document, so
     it must not be reformatted or commented. -->
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
    <xsl:output method="html" encoding="KOI8-R"/>
    <xsl:template match="TEST">
        <html>
        <body>
        <script>
            if (window.testRunner)
                testRunner.dumpAsText();
            //alert(document.characterSet); // works in Firefox
            document.write("Encoding: " + document.characterSet);
        </script>
        </body>
        </html>
    </xsl:template>
</xsl:stylesheet>
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] |
# documentation_formatters.fy
# Example of fancy's documentation formatters:
# attaches a markdown docstring to an arbitrary object, then prints it
# rendered through the 'markdown formatter.

foo = Object new

# Typo fixes in the displayed documentation text below:
# "arbitray" -> "arbitrary", "Fancy you to create" -> "Fancy allows you to create".
Fancy Documentation for: foo is: """
## Fancy documentation.
It should be possible to set documentation for any _arbitrary object_.
Doing so will expose it on the API documents.
This can be useful for constants, or singleton objects.
## Fancy Documentation formatters
Fancy allows you to create custom documentation formatters,
thus, allowing you to display an object document in a well
formatted way for different environments, eg. when using the
interactive REPL you may want *ANSI* _colored_ output, or maybe
we create a tool to generate MAN(1) pages, and fdoc tool
to generate HTML API docs.
"""

# Render the documentation with the markdown formatter and print it.
Fancy Documentation for: foo . format: 'markdown . println
"BSD-3-Clause"
] |
div#\30 {} | CSS | 1 | mengxy/swc | crates/swc_css_parser/tests/fixture/esbuild/misc/hfprsTDi2yEOOmPdjb8Cew/input.css | [
"Apache-2.0"
] |
#SingleInstance force
#Persistent
#include Lib\AutoHotInterception.ahk

; Demonstrates Subscribe / Unsubscribe (Turn on/off block) dependent on active window
; Block is active in Notepad, inactive otherwise

AHI := new AutoHotInterception()
; Resolve the device id of the keyboard with VID 0x04F2 / PID 0x0112.
keyboardId := AHI.GetKeyboardId(0x04F2, 0x0112)
; Negative period: fire the WatchWin timer once, immediately, on its own thread.
SetTimer, WatchWin, -0
return

; Callback invoked for each event of the subscribed key.
; NOTE(review): the meaning of `state` (press vs release) follows the
; AutoHotInterception callback convention -- confirm against its docs.
KeyEvent(state){
	ToolTip % "State: " state
}

; Subscribe (state true) or unsubscribe (state false) the "1" key.
DoSub(state){
	global AHI, keyboardId
	if (state){
		; Third argument true: block the keystroke while subscribed.
		AHI.SubscribeKey(keyboardId, GetKeySC("1"), true, Func("KeyEvent"))
	} else {
		AHI.UnsubscribeKey(keyboardId, GetKeySC("1"))
	}
}

; Endless watcher: enable the subscription whenever Notepad gains focus,
; disable it again when Notepad loses focus.
WatchWin:
Loop {
	WinWaitActive, ahk_class Notepad
	DoSub(true)
	WinWaitNotActive, ahk_class Notepad
	DoSub(false)
}
return

; Ctrl+Esc terminates the script.
^Esc::
ExitApp
<?xml version="1.0" encoding="UTF-8"?>
<!-- ******************************************************************* -->
<!-- -->
<!-- © Copyright IBM Corp. 2010, 2012 -->
<!-- -->
<!-- Licensed under the Apache License, Version 2.0 (the "License"); -->
<!-- you may not use this file except in compliance with the License. -->
<!-- You may obtain a copy of the License at: -->
<!-- -->
<!-- http://www.apache.org/licenses/LICENSE-2.0 -->
<!-- -->
<!-- Unless required by applicable law or agreed to in writing, software -->
<!-- distributed under the License is distributed on an "AS IS" BASIS, -->
<!-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -->
<!-- implied. See the License for the specific language governing -->
<!-- permissions and limitations under the License. -->
<!-- -->
<!-- ******************************************************************* -->
<faces-config>
<faces-config-extension>
<namespace-uri>http://www.ibm.com/xsp/coreex</namespace-uri>
<default-prefix>xe</default-prefix>
<designer-extension>
<control-subpackage-name>calendar</control-subpackage-name>
</designer-extension>
</faces-config-extension>
<group>
<group-type>com.ibm.xsp.extlib.group.calendar.storeTitle</group-type>
<property>
<description>The accessibilty title of this data source, used when there are multiple data sources providing calendar entries. When entries from this data source receive focus, this title will be made available to accessibility tools (like screen reader software). This is supplemental to the practice of using different colors for entries from different data sources.</description>
<display-name>Provider Title</display-name>
<property-name>storeTitle</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<localizable>true</localizable>
<designer-extension>
<category>accessibility</category>
<!-- This is a screen reader accessibility title for this data source,
dynamically set as a title attribute whenever
an entry from this data source receives focus.
(see the function getTitleForScreenReader in the calendar widget) -->
<!-- This is not a server requestScope variable name
(junit test detects the description contains 'will be made available') -->
<tags>
not-server-variable-name
</tags>
</designer-extension>
</property-extension>
</property>
<group-extension>
<designer-extension>
<tags>
group-in-control
</tags>
</designer-extension>
</group-extension>
</group>
<group>
<group-type>com.ibm.xsp.extlib.group.calendar.jsId</group-type>
<property>
<!-- key: property.jsId.component.calendarView. -->
<description>Overrides the default browser JavaScript variable name for this control</description>
<display-name>JavaScript ID</display-name>
<property-name>jsId</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>basics</category>
<!-- Note, we should probably document for the users:
the jsId is not clientId namespaced, so if you include 2 calendarStores
with the same jsId on the same page, it won't work. -->
<!-- This is declaring a jsId (not referencing an existing jsId that's declared elsewhere) -->
<!-- Should maybe have need some kind of editor. Not sure such editors have been provided
by the Designer team - maybe ask them to build editor extensions. -->
<!-- This is not a control reference, it is a declaration of a JavaScript variable -->
<!-- This is not a server requestScope variable name,
it's a client-side browser variable name.-->
<tags>
not-control-id-reference
not-server-variable-name
</tags>
</designer-extension>
</property-extension>
</property>
<group-extension>
<designer-extension>
<tags>group-in-control</tags>
</designer-extension>
</group-extension>
</group>
<!-- View Grid -->
<component>
<description>A control that displays an iNotes calendar</description>
<display-name>iNotes Calendar</display-name>
<component-type>com.ibm.xsp.extlib.calendar.CalendarView</component-type>
<component-class>com.ibm.xsp.extlib.component.calendar.UICalendarView</component-class>
<group-type-ref>com.ibm.xsp.group.core.prop.style</group-type-ref>
<group-type-ref>com.ibm.xsp.group.core.prop.styleClass</group-type-ref>
<group-type-ref>com.ibm.xsp.extlib.group.calendar.jsId</group-type-ref>
<property>
<!--# Do not translate quoted letters, "D", "T", ... etc ... , "Y"-->
<description>Identifies which calendar display format to use, where "D" = one day, "T" = two days, "F" = five day week, "W"= full week, 2 = two weeks, "M" = month, "Y" = year</description>
<display-name>Type</display-name>
<property-name>type</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>basics</category>
<editor>com.ibm.workplace.designer.property.editors.comboParameterEditor</editor>
<editor-parameter>
D
T
F
W
2
M
Y
</editor-parameter>
</designer-extension>
</property-extension>
</property>
<property>
<description>Identifies the control used as the calendar data store. The nominated control will be used to populate the calendar control.</description>
<display-name>Calendar Data Store Identifier</display-name>
<property-name>storeComponentId</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>basics</category>
<editor>com.ibm.xsp.extlib.designer.tooling.editor.XPageControlIDEditor</editor>
<editor-parameter>
http://www.ibm.com/xsp/coreex|notesCalendarStore
http://www.ibm.com/xsp/coreex|iCalReadStore
</editor-parameter>
</designer-extension>
</property-extension>
</property>
<property>
<description>Show only the entries in a list, instead of the grid showing all of the possible timeslots where entry meetings may occur</description>
<display-name>Summarize</display-name>
<property-name>summarize</property-name>
<property-class>boolean</property-class>
<property-extension>
<designer-extension>
<category>basics</category>
</designer-extension>
</property-extension>
</property>
<property>
<description>The date initially selected in the calendar. Defaults to today.</description>
<display-name>Date</display-name>
<property-name>date</property-name>
<property-class>java.util.Date</property-class>
<property-extension>
<designer-extension>
<category>basics</category>
</designer-extension>
</property-extension>
</property>
<property>
<description>Action handler when an entry is going to be created</description>
<display-name>New Entry Action</display-name>
<property-name>onNewEntry</property-name>
<!-- Needs better description and Designer support has issues - xp v xe and EventHandler support -->
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>events</category>
<tags>todo</tags>
<event>true</event>
<subcategory>container-event</subcategory>
</designer-extension>
</property-extension>
</property>
<property>
<description>Action handler when an entry is going to be opened</description>
<display-name>Open Entry Action</display-name>
<!-- Needs better description and Designer support has issues - xp v xe and EventHandler support -->
<property-name>onOpenEntry</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>events</category>
<tags>todo</tags>
<event>true</event>
<subcategory>container-event</subcategory>
</designer-extension>
</property-extension>
</property>
<property>
<description>Action handler when an entry is selected</description>
<display-name>Select Entry Action</display-name>
<!-- Needs better description and Designer support has issues - xp v xe and EventHandler support -->
<property-name>onSelectEntry</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>events</category>
<tags>todo</tags>
<event>true</event>
<subcategory>container-event</subcategory>
</designer-extension>
</property-extension>
</property>
<property>
<description>Action handler when an entry is going to be deleted</description>
<display-name>Delete Entry Action</display-name>
<property-name>onDeleteEntry</property-name>
<!-- Needs better description and Designer support has issues - xp v xe and EventHandler support -->
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>events</category>
<tags>todo</tags>
<event>true</event>
<subcategory>container-event</subcategory>
</designer-extension>
</property-extension>
</property>
<property>
<description>Action handler when context menu needs to be launched</description>
<display-name>Handle Context Menu</display-name>
<!-- Needs better description and investigate feature behaviour -->
<property-name>onContextMenu</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>events</category>
<tags>todo</tags>
<event>true</event>
<subcategory>container-event</subcategory>
</designer-extension>
</property-extension>
</property>
<property>
<description>The event when an entry is going to be rescheduled</description>
<display-name>On Reschedule Entry</display-name>
<!-- Needs better description and investigate feature behaviour -->
<property-name>onRescheduleEntry</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>events</category>
<tags>todo</tags>
<event>true</event>
<subcategory>container-event</subcategory>
</designer-extension>
</property-extension>
</property>
<property>
<description>The event when the view is going to be changed (e.g. yearview)</description>
<display-name>On Change View</display-name>
<property-name>onChangeView</property-name>
<!-- Needs better description and investigate feature behaviour -->
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>events</category>
<tags>todo</tags>
<event>true</event>
<subcategory>container-event</subcategory>
</designer-extension>
</property-extension>
</property>
<component-extension>
<base-component-type>javax.faces.Component</base-component-type>
<component-family>com.ibm.xsp.extlib.calendar.CalendarView</component-family>
<renderer-type>com.ibm.xsp.extlib.calendar.CalendarView</renderer-type>
<tag-name>calendarView</tag-name>
<designer-extension>
<in-palette>true</in-palette>
<!--TODO update description with more detail-->
<tags>
todo
</tags>
<category>iNotes</category>
</designer-extension>
</component-extension>
</component>
<!-- Data Store -->
<component>
<description>Data store for Notes calendar view</description>
<!--TODO *** WHAT IS THIS AND DO WE NEED IT ??? *** -->
<display-name>Notes Calendar Store</display-name>
<!--TODO update description with more detail - 2 or 3 lines are okay -->
<component-type>com.ibm.xsp.extlib.calendar.NotesCalendarStore</component-type>
<component-class>com.ibm.xsp.extlib.component.calendar.UINotesCalendarStore</component-class>
<group-type-ref>com.ibm.xsp.extlib.group.FacesDojoComponent</group-type-ref>
<group-type-ref>com.ibm.xsp.extlib.group.calendar.storeTitle</group-type-ref>
<group-type-ref>com.ibm.xsp.extlib.group.calendar.jsId</group-type-ref>
<property>
<description>Domino database name</description>
<display-name>Database Name</display-name>
<property-name>databaseName</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>basics</category>
<!-- TODO is this the same as the xe:dominoView databaseName - allowing server prefixes etc?
if so should reuse a common group as this is used elsewhere,
also description should be updated to explain different values allowable here
also if the Designer team have any editor for this property, should be used here
-->
<!-- TODO why doesn't this have a viewName property?-->
<tags>
todo
</tags>
</designer-extension>
</property-extension>
</property>
<component-extension>
<base-component-type>javax.faces.Component</base-component-type>
<component-family>com.ibm.xsp.extlib.calendar.CalendarStore</component-family>
<renderer-type>com.ibm.xsp.extlib.calendar.NotesCalendarStore</renderer-type>
<tag-name>notesCalendarStore</tag-name>
<designer-extension>
<in-palette>true</in-palette>
<category>iNotes</category>
<!-- TODO description doesn't explain what a "Store" is - that it's a Dojo
object used in the browser to access data, usually retrieving it from serverside
in a lazy manner, as different days/weeks/months are displayed. -->
<!-- TODO using category>iNotes<, would probably be best if it didn't reference iNotes,
as you don't need iNotes enabled serverside for this data store to work.-->
<tags>
todo
</tags>
</designer-extension>
</component-extension>
</component>
<component>
<description>Data store for iCal store</description>
<!--TODO *** WHAT IS THIS AND DO WE NEED IT ??? *** -->
<display-name>iCal Store</display-name>
<component-type>com.ibm.xsp.extlib.calendar.iCalReadStore</component-type>
<component-class>com.ibm.xsp.extlib.component.calendar.UIiCalReadStore</component-class>
<group-type-ref>com.ibm.xsp.extlib.group.calendar.storeTitle</group-type-ref>
<group-type-ref>com.ibm.xsp.extlib.group.calendar.jsId</group-type-ref>
<property>
<description>URL for iCal repository</description>
<display-name>URL</display-name>
<property-name>url</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>basics</category>
<!-- TODO description should have more detail. -->
<!-- TODO should have an editor - I believe we use the FilePicker
editor as the generic resource editor. - talk To Dan O'Connor
who is working on resource properties. -->
<tags>
todo
</tags>
</designer-extension>
</property-extension>
</property>
<property>
<description>Background color for the calendar events</description>
<display-name>Background Color</display-name>
<property-name>bgColorMeeting</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>format</category>
<!-- TODO this description is using "event" to mean, meeting or appointment or other calendar entry,
but that is confusing in the XPages context, where "event" usually means event property
appearing in the Events view, like onClick, onSelect, onMouseOver, etc.
Same problem applies to the next 2 descriptions -->
<!-- TODO should get this property and the next 2 properties merged into
properties for meetingStyle, meetingStyleClass, as the bgcolor, borderColor and fontColor
are all part of styles, so if the style/styleClass properties are used, then
there will be available editors for editing them in Designer.
Else need to figure out editors for these 3 properties. -->
<!-- TODO are these properties re-computed for every meeting, so that different meetings
can be in different colors, or are they only computed once?
If they are re-computable, then the description should mention it.-->
<!-- TODO for accessibility (blind people) information cannot be conveyed solely
by color - there has to be some textual prefix or other text hint for screen
readers. Are these colors intended to provide information? (e.g. that the event
is a meeting or appointment, that they are in the past, disabled, etc)
If so then there needs to be some other way of providing that information. -->
<tags>
todo
</tags>
</designer-extension>
</property-extension>
</property>
<property>
<description>Border color for the event</description>
<display-name>Border Color</display-name>
<property-name>borderColorMeeting</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>format</category>
</designer-extension>
</property-extension>
</property>
<property>
<description>Font color for the event</description>
<display-name>Font Color</display-name>
<property-name>fontColorMeeting</property-name>
<property-class>java.lang.String</property-class>
<property-extension>
<designer-extension>
<category>format</category>
</designer-extension>
</property-extension>
</property>
<component-extension>
<base-component-type>javax.faces.Component</base-component-type>
<component-family>com.ibm.xsp.extlib.calendar.CalendarStore</component-family>
<renderer-type>com.ibm.xsp.extlib.calendar.iCalReadStore</renderer-type>
<tag-name>iCalReadStore</tag-name>
<designer-extension>
<in-palette>true</in-palette>
<category>iNotes</category>
<!-- TODO description doesn't explain what iCal is - that it is a data interchange format
for calendar and appointment passing between different programs, and is used to allow
calendar data to be displayed from non-Notes web servers and from 3rd party programs, like Google Calendar.-->
<!-- TODO why is it called iCalReadStore - why Read - is it read-only? if so, is there support
in the calendar widget for read-only stores, or it allow you to drag and drop to reschedule an event
and you get some sort of error when it can't reschedule? If we're supplying a read-only store,
then the calendar UI widget should support read-only display, and should detect
from the store that the store is read-only. -->
<tags>
todo
</tags>
</designer-extension>
</component-extension>
</component>
</faces-config>
| XPages | 4 | jesse-gallagher/XPagesExtensionLibrary | extlib/lwp/product/runtime/eclipse/plugins/com.ibm.xsp.extlib.domino/src/com/ibm/xsp/extlib/config/raw-dwa-calendar.xsp-config | [
"Apache-2.0"
] |
# Copyright (c) 2018-2021, Carnegie Mellon University
# See LICENSE for details
Class(VWrapBase, rec(
opts := (self, t, opts) >> opts,
));
Class(VWrapId, VWrapBase, rec(
__call__ := self >> self,
wrap := (self,r,t,opts) >> r,
twrap := (self,t,opts) >> t
));
Class(DPWrapper, SumsBase, BaseContainer, rec(
_short_print := true,
new := (self, spl, wrap) >> SPL(WithBases(self, rec(
_children := [spl],
dimensions := spl.dimensions,
wrap := wrap,
))),
rChildren := self >> [self._children[1], self.wrap],
rSetChild := meth(self, n, newC)
if n=1 then self._children[1] := newC;
elif n=2 then self.wrap := newC;
else Error("<n> must be in [1..2]");
fi;
end,
sums := self >> self._children[1].sums(),
print := (self, i, is) >> Cond(self._short_print,
Print(self.__name__, "(", self._children[1].print(i+is, is), ", ", self.wrap, ")"),
Print(self.__name__, "(\n", Blanks(i+is),
self._children[1].print(i+is, is), ",\n", Blanks(i+is), self.wrap, "\n", Blanks(i), ")")),
HashId := self >> let(h := [ When(IsBound(self._children[1].HashId), self._children[1].HashId(), self._children[1]) ],
When(IsBound(self.tags), Concatenation(h, self.tags), h)),
vcost := self >> self.child(1).vcost()
));
#F DPSWrapper - wrapper for stackable VWraps;
#F all stackable wrappers applied to formula in _DPSPLRec, innermost first.
Class(DPSWrapper, DPWrapper);
ClassSPL.setWrap := (self, wrap) >> DPWrapper(self, wrap).takeAobj(self);
ClassSPL.addWrap := (self, wrap) >> DPSWrapper(self, wrap).takeAobj(self);
| GAP | 4 | sr7cb/spiral-software | namespaces/spiral/spl/vwrap.gi | [
"BSD-2-Clause-FreeBSD"
] |
open nat
example {k n m : ℕ} (h : k + n ≤ k + m) : n ≤ m :=
match le.dest h with
| ⟨w, hw⟩ := @le.intro _ _ w
begin
-- hw is beta reduced after we added the equation compiler preprocessor.
-- So, this test is not really relevant anymore.
rw [nat.add_assoc] at hw,
apply nat.add_left_cancel hw
end
end
| Lean | 4 | JLimperg/lean | tests/lean/bad_error2.lean | [
"Apache-2.0"
] |
<div
#tooltip
class="mdc-tooltip mdc-tooltip--shown mat-mdc-tooltip"
[ngClass]="tooltipClass"
(animationend)="_handleAnimationEnd($event)"
[class.mdc-tooltip--multiline]="_isMultiline">
<div class="mdc-tooltip__surface mdc-tooltip__surface-animation">{{message}}</div>
</div>
| HTML | 4 | Achilles1515/components | src/material-experimental/mdc-tooltip/tooltip.html | [
"MIT"
] |
[[actuator.tracing]]
== HTTP Tracing
You can enable HTTP Tracing by providing a bean of type `HttpTraceRepository` in your application's configuration.
For convenience, Spring Boot offers `InMemoryHttpTraceRepository`, which stores traces for the last 100 (the default) request-response exchanges.
`InMemoryHttpTraceRepository` is limited compared to other tracing solutions, and we recommend using it only for development environments.
For production environments, we recommend using a production-ready tracing or observability solution, such as Zipkin or Spring Cloud Sleuth.
Alternatively, you can create your own `HttpTraceRepository`.
You can use the `httptrace` endpoint to obtain information about the request-response exchanges that are stored in the `HttpTraceRepository`.
[[actuator.tracing.custom]]
=== Custom HTTP tracing
To customize the items that are included in each trace, use the configprop:management.trace.http.include[] configuration property.
For advanced customization, consider registering your own `HttpExchangeTracer` implementation.
| AsciiDoc | 4 | techAi007/spring-boot | spring-boot-project/spring-boot-docs/src/docs/asciidoc/actuator/tracing.adoc | [
"Apache-2.0"
] |
@0x99f75f775fe63dae;
struct TestRecordStruct
{
id @0 : Int64;
blockNo @1 : UInt16;
val1 @2 : Text;
val2 @3 : Float32;
val3 @4 : UInt8;
} | Cap'n Proto | 3 | pdv-ru/ClickHouse | tests/integration/test_storage_kafka/clickhouse_path/format_schemas/test.capnp | [
"Apache-2.0"
] |
#include <iostream>
using namespace std;
class Rectangle
int width, height;
public:
known Rectangle()
width = 20
height = 10
set_values(int x, y)
width = x
height = y
area ()
return width * height
| COBOL | 0 | saviour07/CPY | Examples/Class/rectangle.cpy | [
"MIT"
] |
/*
* Copyright (c) 2021 Alex Spataru <https://github.com/alex-spataru>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
import QtQuick
import QtQuick.Window
import QtQuick.Layouts
import QtQuick.Controls
import SerialStudio
import "../Widgets" as Widgets
Item {
id: root
property int widgetIndex: -1
Widgets.Window {
id: window
anchors.fill: parent
title: loader.widgetTitle
icon.source: loader.widgetIcon
borderColor: Cpp_ThemeManager.widgetWindowBorder
onHeaderDoubleClicked: externalWindow.visible = true
WidgetLoader {
id: loader
widgetIndex: root.widgetIndex
anchors {
fill: parent
leftMargin: window.borderWidth
rightMargin: window.borderWidth
bottomMargin: window.borderWidth
}
MouseArea {
hoverEnabled: true
anchors.fill: parent
acceptedButtons: Qt.NoButton
onContainsMouseChanged: loader.processMouseHover(containsMouse)
}
}
}
Window {
id: externalWindow
minimumWidth: 640
minimumHeight: 480
title: externalLoader.widgetTitle
palette.base: Cpp_ThemeManager.widgetWindowBackground
palette.window: Cpp_ThemeManager.widgetWindowBackground
flags: Qt.Dialog | Qt.WindowCloseButtonHint | Qt.WindowTitleHint
WidgetLoader {
id: externalLoader
anchors.fill: parent
isExternalWindow: true
widgetIndex: root.widgetIndex
widgetVisible: externalWindow.visible
}
MouseArea {
hoverEnabled: true
anchors.fill: parent
acceptedButtons: Qt.NoButton
onContainsMouseChanged: externalLoader.processMouseHover(containsMouse)
}
}
}
| QML | 4 | Serial-Studio/Serial-Studio | assets/qml/Dashboard/WidgetDelegate.qml | [
"MIT"
] |
sleep 1
t app appmode photo
sleep 300
t app button shutter PR
d:\autoexec.ash
REBOOT yes
| AGS Script | 0 | waltersgrey/autoexechack | MegaLapse/5MinInterval/Hero3PlusBlack/autoexec.ash | [
"MIT"
] |
*** Keyword ***
Keyword Only In Resource 2
Log Keyword in resource 2
Keyword In Both Resources
Log Keyword in resource 2
Keyword In All Resources And Libraries
Log Keyword in resource 2
Keyword Everywhere
Log Keyword in resource 2
Similar Kw 2
No Operation
| RobotFramework | 3 | phil-davis/robotframework | atest/testdata/keywords/resources/recommendation_resource_2.robot | [
"ECL-2.0",
"Apache-2.0"
] |
/*
* Copyright 2010-2021 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.util
/** Implementation status of a member symbol that is available inside a class scope. */
enum class ImplementationStatus {
/** This symbol is not implemented and should be implemented if the class is not abstract. */
NOT_IMPLEMENTED,
/** The symbol is inheriting multiple non-abstract symbols and hence must be explicitly implemented. */
AMBIGUOUSLY_INHERITED,
/**
* This symbol has an inherited implementation, and it can be overridden if desired. For example, it's an open non-abstract member or
* it's automatically synthesized by the Kotlin compiler.
*/
INHERITED_OR_SYNTHESIZED,
/** The symbol is already implemented in this class. */
ALREADY_IMPLEMENTED,
/**
* The symbol is not implemented in the class and it cannot be implemented. For example, it's final in super classes or the current
* class is `expect`.
*/
CANNOT_BE_IMPLEMENTED;
val shouldBeImplemented: Boolean get() = this == NOT_IMPLEMENTED || this == AMBIGUOUSLY_INHERITED
val isOverridable: Boolean get() = this != ALREADY_IMPLEMENTED && this != CANNOT_BE_IMPLEMENTED
}
| Kotlin | 5 | Mu-L/kotlin | core/compiler.common/src/org/jetbrains/kotlin/util/ImplementationStatus.kt | [
"ECL-2.0",
"Apache-2.0"
] |
import generateUtilityClass from '../generateUtilityClass';
import generateUtilityClasses from '../generateUtilityClasses';
export interface TabsUnstyledClasses {
root: string;
horizontal: string;
vertical: string;
}
export type TabsUnstyledClassKey = keyof TabsUnstyledClasses;
export function getTabsUnstyledUtilityClass(slot: string): string {
return generateUtilityClass('TabsUnstyled', slot);
}
const tabsUnstyledClasses: TabsUnstyledClasses = generateUtilityClasses('TabsUnstyled', [
'root',
'horizontal',
'vertical',
]);
export default tabsUnstyledClasses;
| TypeScript | 4 | dany-freeman/material-ui | packages/mui-base/src/TabsUnstyled/tabsUnstyledClasses.ts | [
"MIT"
] |
module examples/toys/birthday
/*
* Birthday Book
*
* A classic Z example to explain the basic form of an Alloy model. For the original,
* see J.M. Spivey, The Z Notation, Second Edition, Prentice Hall, 1992.
*
* A birthday book has two fields: known, a set of names (of persons whose birthdays are known),
* and date, a function from known names to dates. The operation AddBirthday adds an association
* between a name and a date; it uses the relational override operator (++), so any existing
* mapping from the name to a date is replaced. DelBirthday removes the entry for a given name.
* FindBirthday obtains the date d for a name n. The argument d is declared to be optional (that is,
* a singleton or empty set), so if there is no entry for n, d will be empty. Remind gives the set
* of names whose birthdays fall on a particular day.
*
* The assertion AddWorks says that if you add an entry, then look it up, you get back what you
* just entered. DelIsUndo says that doing DelBirthday after AddBirthday undoes it, as if the add
* had never happened. The first of these assertions is valid; the second isn't.
*
* The function BusyDay shows a case in which Remind produces more than one card.
*
* author: Daniel Jackson, 11/14/01
*/
sig Name {}
sig Date {}
sig BirthdayBook {known: set Name, date: known -> one Date}
pred AddBirthday [bb, bb1: BirthdayBook, n: Name, d: Date] {
bb1.date = bb.date ++ (n->d)
}
pred DelBirthday [bb, bb1: BirthdayBook, n: Name] {
bb1.date = bb.date - (n->Date)
}
pred FindBirthday [bb: BirthdayBook, n: Name, d: lone Date] {
d = bb.date[n]
}
pred Remind [bb: BirthdayBook, today: Date, cards: set Name] {
cards = (bb.date).today
}
pred InitBirthdayBook [bb: BirthdayBook] {
no bb.known
}
assert AddWorks {
all bb, bb1: BirthdayBook, n: Name, d: Date, d1: lone Date |
AddBirthday [bb,bb1,n,d] && FindBirthday [bb1,n,d1] => d = d1
}
assert DelIsUndo {
all bb1,bb2,bb3: BirthdayBook, n: Name, d: Date|
AddBirthday [bb1,bb2,n,d] && DelBirthday [bb2,bb3,n]
=> bb1.date = bb3.date
}
check AddWorks for 3 but 2 BirthdayBook expect 0
check DelIsUndo for 3 but 2 BirthdayBook expect 1
pred BusyDay [bb: BirthdayBook, d: Date]{
some cards: set Name | Remind [bb,d,cards] && !lone cards
}
run BusyDay for 3 but 1 BirthdayBook expect 1
| Alloy | 5 | haslab/Electrum | electrum/src/main/resources/models/examples/toys/birthday.als | [
"MIT"
] |
$ today == f$extract(0,11,f$time())
$ open /read/error=sethost infile "sys$login:bar.dat"
$ read /error=sethost /end=sethost infile data
$ if f$extract(0,11,data) .EQS. today then goto close_file
$sethost:
$ run STH ! start terminal emulation
$ open /write/error=file_error outfile "sys$login:bar.dat"
$ write /error=file_error outfile today
$close_file:
$ close infile
$ close outfile
$ goto exit
$file_error:
$ write sys$output f$fao("ERROR !UL - !AS",'$STATUS',f$message('$STATUS'))
$exit:
| Clean | 3 | MichelValentin/PC-DCL | examples/bar.dcl | [
"BSD-3-Clause"
] |
#*****************************************************************************
# *
# Make file for VMS *
# Author : J.Jansen (joukj@hrem.nano.tudelft.nl) *
# Date : 15 November 2010 *
# *
#*****************************************************************************
.first
define wx [--.include.wx]
.ifdef __WXMOTIF__
CXX_DEFINE = /define=(__WXMOTIF__=1)/name=(as_is,short)\
/assume=(nostdnew,noglobal_array_new)
.else
.ifdef __WXGTK__
CXX_DEFINE = /define=(__WXGTK__=1)/float=ieee/name=(as_is,short)/ieee=denorm\
/assume=(nostdnew,noglobal_array_new)
.else
.ifdef __WXGTK2__
CXX_DEFINE = /define=(__WXGTK__=1,VMS_GTK2=1)/float=ieee/name=(as_is,short)/ieee=denorm\
/assume=(nostdnew,noglobal_array_new)
.else
.ifdef __WXX11__
CXX_DEFINE = /define=(__WXX11__=1,__WXUNIVERSAL__==1)/float=ieee\
/name=(as_is,short)/assume=(nostdnew,noglobal_array_new)
.else
CXX_DEFINE =
.endif
.endif
.endif
.endif
.suffixes : .cpp
.cpp.obj :
cxx $(CXXFLAGS)$(CXX_DEFINE) $(MMS$TARGET_NAME).cpp
all :
.ifdef __WXMOTIF__
$(MMS)$(MMSQUALIFIERS) xrcdemo.exe
.else
.ifdef __WXGTK__
$(MMS)$(MMSQUALIFIERS) xrcdemo_gtk.exe
.else
.ifdef __WXGTK2__
$(MMS)$(MMSQUALIFIERS) xrcdemo_gtk2.exe
.else
.ifdef __WXX11__
$(MMS)$(MMSQUALIFIERS) xrcdemo_x11.exe
.endif
.endif
.endif
.endif
.ifdef __WXMOTIF__
xrcdemo.exe : xrcdemo.obj,custclas.obj,derivdlg.obj,myframe.obj,objrefdlg.obj
cxxlink xrcdemo,custclas.obj,derivdlg.obj,myframe.obj,objrefdlg.obj,\
[--.lib]vms/opt
.else
.ifdef __WXGTK__
xrcdemo_gtk.exe : xrcdemo.obj,custclas.obj,derivdlg.obj,myframe.obj,\
objrefdlg.obj
cxxlink/exec=xrcdemo_gtk.exe xrcdemo,custclas.obj,derivdlg.obj,\
myframe.obj,objrefdlg.obj,[--.lib]vms_gtk/opt
.else
.ifdef __WXGTK2__
xrcdemo_gtk2.exe : xrcdemo.obj,custclas.obj,derivdlg.obj,myframe.obj
cxxlink/exec=xrcdemo_gtk2.exe xrcdemo,custclas.obj,derivdlg.obj,\
myframe.obj,[--.lib]vms_gtk2/opt
.else
.ifdef __WXX11__
xrcdemo_x11.exe : xrcdemo.obj,custclas.obj,derivdlg.obj,myframe.obj
cxxlink/exec=xrcdemo_x11.exe xrcdemo,custclas.obj,derivdlg.obj,\
myframe.obj,[--.lib]vms_x11_univ/opt
.endif
.endif
.endif
.endif
xrcdemo.obj : xrcdemo.cpp
custclas.obj : custclas.cpp
derivdlg.obj : derivdlg.cpp
myframe.obj : myframe.cpp
objrefdlg.obj : objrefdlg.cpp
| Module Management System | 3 | madanagopaltcomcast/pxCore | examples/pxScene2d/external/WinSparkle/3rdparty/wxWidgets/samples/xrc/descrip.mms | [
"Apache-2.0"
] |
// Copyright 2015 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package grpc.testing;
option objc_class_prefix = "AUTH";
// Unary request.  Controls which optional fields the server fills in
// on the Response it returns.
//
// NOTE(review): field numbers are non-contiguous (4, 5) - presumably
// kept wire-compatible with a fuller request message this sample was
// trimmed from; confirm before renumbering.
message Request {
  // Whether Response should include username.
  bool fill_username = 4;

  // Whether Response should include OAuth scope.
  bool fill_oauth_scope = 5;
}
// Unary response, as configured by the request.
//
// NOTE(review): field numbers start at 2 - presumably aligned with
// another message definition; confirm before renumbering.
message Response {
  // The user the request came from, for verifying authentication was
  // successful.
  string username = 2;

  // OAuth scope granted to the request's credentials.
  string oauth_scope = 3;
}
// Minimal test service exposing a single unary call; used to exercise
// authenticated RPCs against the Request/Response messages above.
service TestService {
  // One request followed by one response.
  rpc UnaryCall(Request) returns (Response);
}
| Protocol Buffer | 4 | samotarnik/grpc | examples/protos/auth_sample.proto | [
"Apache-2.0"
] |
<%# Streaming layout fixture: yields :header and :footer, exercises capture inside a streamed layout, and falls back to "." when :unknown yields nothing.  The trimmed comment tag renders no output. -%>
<%= yield :header -%>
<%= capture do %>
this works
<% end %>
<%= yield :footer -%>
<%= yield(:unknown).presence || "." -%>
| HTML+ERB | 3 | mdesantis/rails | actionview/test/fixtures/layouts/streaming_with_capture.erb | [
"MIT"
] |
; CLW file contains information for the MFC ClassWizard
[General Info]
Version=1
LastClass=CPdbFairyDlg
LastTemplate=CDialog
NewFileInclude1=#include "stdafx.h"
NewFileInclude2=#include "PdbFairy.h"
ClassCount=3
Class1=CPdbFairyApp
Class2=CPdbFairyDlg
Class3=CAboutDlg
ResourceCount=5
Resource1=IDD_ABOUTBOX
Resource2=IDR_MAINFRAME
Resource3=IDD_PDBFAIRY_DIALOG
Resource4=IDD_ABOUTBOX (English (U.S.))
Resource5=IDD_PDBFAIRY_DIALOG (English (U.S.))
[CLS:CPdbFairyApp]
Type=0
HeaderFile=PdbFairy.h
ImplementationFile=PdbFairy.cpp
Filter=N
[CLS:CPdbFairyDlg]
Type=0
HeaderFile=PdbFairyDlg.h
ImplementationFile=PdbFairyDlg.cpp
Filter=D
BaseClass=CDialog
VirtualFilter=dWC
LastObject=IDC_LIST1
[CLS:CAboutDlg]
Type=0
HeaderFile=PdbFairyDlg.h
ImplementationFile=PdbFairyDlg.cpp
Filter=D
[DLG:IDD_ABOUTBOX]
Type=1
ControlCount=4
Control1=IDC_STATIC,static,1342177283
Control2=IDC_STATIC,static,1342308352
Control3=IDC_STATIC,static,1342308352
Control4=IDOK,button,1342373889
Class=CAboutDlg
[DLG:IDD_PDBFAIRY_DIALOG]
Type=1
ControlCount=3
Control1=IDOK,button,1342242817
Control2=IDCANCEL,button,1342242816
Control3=IDC_STATIC,static,1342308352
Class=CPdbFairyDlg
[DLG:IDD_PDBFAIRY_DIALOG (English (U.S.))]
Type=1
Class=CPdbFairyDlg
ControlCount=7
Control1=IDCANCEL,button,1342242816
Control2=IDC_LIST1,listbox,1352728833
Control3=ID_OPEN,button,1342242816
Control4=ID_HEADER,button,1342242816
Control5=ID_STREAMS,button,1342242816
Control6=ID_CLEAR,button,1342242816
Control7=ID_SIGNATURE,button,1342242816
[DLG:IDD_ABOUTBOX (English (U.S.))]
Type=1
Class=CAboutDlg
ControlCount=4
Control1=IDC_STATIC,static,1342177283
Control2=IDC_STATIC,static,1342308480
Control3=IDC_STATIC,static,1342308352
Control4=IDOK,button,1342373889
| Clarion | 2 | oudream/ccxx | test/dump/swdbgbk_src/chap25/PdbFairy/PdbFairy.clw | [
"MIT"
] |
//
// Copyright (c) 2006, Brian Frank and Andy Frank
// Licensed under the Academic Free License version 3.0
//
// History:
// 15 Sep 05 Brian Frank Creation
// 3 Jun 06 Brian Frank Ported from Java to Fantom - Megan's b-day!
//
**
** TypeDef models a type definition for a class, mixin or enum
**
class TypeDef : CTypeDef
{

//////////////////////////////////////////////////////////////////////////
// Construction
//////////////////////////////////////////////////////////////////////////

  **
  ** Construct a type definition for simple name 'name' declared in
  ** compilation unit 'unit'.  The qualified name is derived as
  ** "podName::name".  All slot/enum/closure collections start empty
  ** and are populated by later compiler steps.
  **
  new make(Loc loc, CompilationUnit unit, Str name, Int flags := 0)
  {
    this.loc = loc
    // this.ns = ns
    this.pod = unit.pod
    this.unit = unit
    this.name = name
    this.qname = pod.name + "::" + name
    this.flags = flags
    //this.isVal = CType.isValType(qname)
    this.inheritances = CType[,]
    this.enumDefs = EnumDef[,]
    // this.slotMap = Str:CSlot[:]
    // this.slotDefMap = Str:SlotDef[:]
    this.slotDefList = SlotDef[,]
    this.closures = ClosureExpr[,]
  }

  // Location of the type declaration in source
  override Loc loc

  // Bitmask of type modifier flags
  override Int flags

  // Fandoc documentation attached to this type, if any
  override DocDef? doc

  // Facets declared on this type; null until the first facet is added
  override CFacet[]? facets

  **
  ** Add a facet of the given type to this type definition.  'vals'
  ** maps facet field names to values; each value is wrapped as a
  ** literal expression at this type's location.
  **
  Void addFacet(CType type, [Str:Obj]? vals := null)
  {
    if (facets == null) facets = FacetDef[,]
    loc := this.loc
    f := FacetDef(loc, type)
    vals?.each |v, n|
    {
      f.names.add(n)
      f.vals.add(Expr.makeForLiteral(loc, v))
    }
    facets.add(f)
  }

//////////////////////////////////////////////////////////////////////////
// Access
//////////////////////////////////////////////////////////////////////////

  **
  ** Return if this type is the anonymous class generated for a closure.
  **
  Bool isClosure()
  {
    return closure != null
  }

//////////////////////////////////////////////////////////////////////////
// Slots
//////////////////////////////////////////////////////////////////////////

  **
  ** Add a slot to the type definition.  The method is used to add
  ** SlotDefs declared by this type as well as slots inherited by
  ** this type.  If 'slotDefIndex' is non-null the slot is inserted
  ** at that index of the declared slot list, otherwise appended.
  **
  Void addSlot(SlotDef s, Int? slotDefIndex := null)
  {
    // if MethodDef
    m := s as MethodDef
    if (m != null)
    {
      // static initializes are just temporarily added to the
      // slotDefList but never into the name map - we just need
      // to keep them in declared order until they get collapsed
      // and removed in the Normalize step
      if (m.isStaticInit)
      {
        slotDefList.add(m)
        return
      }

      // field accessors are added only to slotDefList,
      // name lookup is always the field itself
      if (m.isFieldAccessor)
      {
        slotDefList.add(m)
        return
      }

      // overload variants likewise live only in slotDefList
      if (m.isOverload) {
        slotDefList.add(m)
        return
      }
    }

    // sanity check
    name := s.name
    // if (slotDefMap.containsKey(name))
    //   throw Err("Internal error: duplicate slot $name [$loc.toLocStr]")

    // if my own SlotDef
    def := s as SlotDef
    if (def != null && def.parent === this)
    {
      // add to my slot definitions
      // (slotDefMapCache is presumably declared on CTypeDef - confirm)
      if (slotDefMapCache != null)
        slotDefMapCache[name] = def
      if (slotDefIndex == null)
        slotDefList.add(def)
      else
        slotDefList.insert(slotDefIndex, def)

      // if non-const FieldDef, then add getter/setter methods
      if (s is FieldDef)
      {
        f := (FieldDef)s
        if (f.get != null) addSlot(f.get)
        if (f.set != null) addSlot(f.set)
      }
    }
  }

  // **
  // ** Replace oldSlot with newSlot in my slot tables.
  // **
  // Void replaceSlot(SlotDef oldSlot, SlotDef newSlot)
  // {
  //   // sanity checks
  //   if (oldSlot.name != newSlot.name)
  //     throw Err("Internal error: not same names: $oldSlot != $newSlot [$loc.toLocStr]")
  //   if (slotMap[oldSlot.name] !== oldSlot)
  //     throw Err("Internal error: old slot not mapped: $oldSlot [$loc.toLocStr]")
  //
  //   // remap in slotMap table
  //   name := oldSlot.name
  //   slotMap[name] = newSlot
  //
  //   // if old is SlotDef
  //   oldDef := oldSlot as SlotDef
  //   if (oldDef != null && oldDef.parent === this)
  //   {
  //     slotDefMap[name] = oldDef
  //     slotDefList.remove(oldDef)
  //   }
  //
  //   // if new is SlotDef
  //   newDef := newSlot as SlotDef
  //   if (newDef != null && newDef.parent === this)
  //   {
  //     slotDefMap[name] = newDef
  //     slotDefList.add(newDef)
  //   }
  // }

  **
  ** Get static initializer if one is defined.
  **
  MethodDef? staticInit()
  {
    return slots["static\$init"]
  }

  **
  ** Get instance initializer if one is defined.
  **
  MethodDef? instanceInit()
  {
    return slots["instance\$init\$$pod.name\$$name"]
  }

  **
  ** If during parse we added any static initializer methods,
  ** now is the time to remove them all and replace them with a
  ** single collapsed MethodDef (processed in Normalize step)
  **
  Void normalizeStaticInits(MethodDef m)
  {
    // remove any temps we had in slotDefList
    slotDefList = slotDefList.exclude |SlotDef s->Bool|
    {
      return MethodDef.isNameStaticInit(s.name)
    }

    // fix enclosingSlot of closures used in those temp statics
    closures.each |ClosureExpr c|
    {
      if (c.enclosingSlot is MethodDef && ((MethodDef)c.enclosingSlot).isStaticInit)
        c.enclosingSlot = m
    }

    // now add the single collapsed initializer into the declared
    // slot list (the old name-map updates are retired, see below)
    // slotMap[m.name] = m
    // slotDefMap[m.name] = m
    slotDefList.add(m)
  }

//////////////////////////////////////////////////////////////////////////
// SlotDefs
//////////////////////////////////////////////////////////////////////////

  **
  ** Get the SlotDefs declared within this TypeDef.
  **
  override SlotDef[] slotDefs()
  {
    return slotDefList
  }

//////////////////////////////////////////////////////////////////////////
// Enum
//////////////////////////////////////////////////////////////////////////

  **
  ** Return EnumDef for specified name or null.
  **
  public EnumDef? enumDef(Str name)
  {
    return enumDefs.find |EnumDef def->Bool| { def.name == name }
  }

//////////////////////////////////////////////////////////////////////////
// Tree
//////////////////////////////////////////////////////////////////////////

  **
  ** Walk the visitor over this type: enter the unit and type, walk
  ** facets, walk slot defs when 'depth' >= slotDef, then visit and
  ** exit in reverse order.
  **
  Void walk(Visitor v, VisitDepth depth)
  {
    v.enterUnit(unit)
    v.enterTypeDef(this)
    walkFacets(v, depth)
    if (depth >= VisitDepth.slotDef)
    {
      slotDefs.each |SlotDef slot| { slot.walk(v, depth) }
    }
    v.visitTypeDef(this)
    v.exitTypeDef(this)
    v.exitUnit(unit)
  }

  **
  ** Collect this type's AST children into 'list': declared facets,
  ** inherited types, and non-synthetic slot definitions.
  **
  override Void getChildren(CNode[] list, [Str:Obj]? options) {
    if (facets != null) {
      facets.each |FacetDef f| {
        list.add(f)
      }
    }

    this.inheritances.each |t| {
      list.add(t)
    }

    slotDefs.each |slot| {
      // skip compiler generated slots
      if (slot.isSynthetic) return
      list.add(slot)
    }
  }

//////////////////////////////////////////////////////////////////////////
// Debug
//////////////////////////////////////////////////////////////////////////

  **
  ** Pretty print this type definition to the given AST writer.
  **
  override Void print(AstWriter out)
  {
    super.print(out)

    if (isMixin)
      out.w("mixin $name")
    else if (isEnum)
      out.w("enum $name")
    else
      out.w("class $name")

    // if (base != null || !mixins.isEmpty)
    // {
    //   out.w(" : ")
    //   if (base != null) out.w(" $base")
    //   if (!mixins.isEmpty) out.w(", ").w(mixins.join(", ")).nl
    // }
    if (!inheritances.isEmpty) out.w(" : ").w(inheritances.join(", ")).nl
    else out.nl

    out.w("{").nl
    out.indent
    enumDefs.each |EnumDef e| { e.print(out) }
    slotDefs.each |SlotDef s| { s.print(out) }
    out.unindent
    out.w("}").nl
  }

  // Generic type parameters declared on this type (empty if none)
  override GenericParamDef[] genericParameters := [,]

  **
  ** Set the base class as the first entry of the inheritance list,
  ** replacing any existing base.
  **
  Void setBase(CType base) {
    if (inheritances.size > 0) inheritances[0] = base
    else inheritances.add(base)
  }

//////////////////////////////////////////////////////////////////////////
// Fields
//////////////////////////////////////////////////////////////////////////

  // override CNamespace ns     // compiler's namespace
  CompilationUnit unit          // parent unit
  override PodDef pod           // parent pod
  override const Str name       // simple class name
  override const Str qname      // podName::name
  //override const Bool isVal   // is this a value type (Bool, Int, etc)
  Bool baseSpecified := true    // was base assigned from source code
  // TypeRef? base              // extends class
  // TypeRef[] mixins           // mixin types
  override CType[] inheritances // base class then mixins
  EnumDef[] enumDefs            // declared enumerated pairs (only if enum)
  ClosureExpr[] closures        // closures where I am enclosing type (Parse)
  ClosureExpr? closure          // if I am a closure anonymous class
  private SlotDef[] slotDefList // declared slot definitions
  FacetDef[]? indexedFacets     // used by WritePod
}
"AFL-3.0"
] |