Datasets:
| text (string, lengths 0 to 27.1M) | meta (dict) |
|---|---|
import tactic.basic
import tactic.omega
import .ch11_imp
open imp
/-
Open Scope imp_scope.
Fixpoint ceval_step2 (st : state) (c : com) (i : nat) : state :=
match i with
| O ⇒ empty_st
| S i' ⇒
match c with
| SKIP ⇒
st
| l ::= a1 ⇒
(l !-> aeval st a1 ; st)
| c1 ;; c2 ⇒
let st' := ceval_step2 st c1 i' in
ceval_step2 st' c2 i'
| TEST b THEN c1 ELSE c2 FI ⇒
if (beval st b)
then ceval_step2 st c1 i'
else ceval_step2 st c2 i'
| WHILE b1 DO c1 END ⇒
if (beval st b1)
then let st' := ceval_step2 st c1 i' in
ceval_step2 st' c i'
else st
end
end.
Close Scope imp_scope.
-/
open nat
def ceval_step₂ : imp.state → com → ℕ → imp.state
| st c 0 := empty_st
| st SKIP (succ i) := st
| st (l ::= a₁) (succ i) := l !→ aeval st a₁ ; st
| st (c₁ ;; c₂) (succ i) :=
let st' := ceval_step₂ st c₁ i in
ceval_step₂ st' c₂ i
| st (TEST b THEN c₁ ELSE c₂ FI) (succ i) :=
if beval st b
then ceval_step₂ st c₁ i
else ceval_step₂ st c₂ i
| st (WHILE b DO c END) (succ i) :=
if beval st b
then
let st' := ceval_step₂ st c i in
ceval_step₂ st' c i
else st
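/- Two sanity checks (a sketch; both should hold by `rfl`, since the equations
   unfold definitionally): with fuel `0` the evaluator always answers `empty_st`,
   and with positive fuel `SKIP` hands back the input state unchanged. -/
example : ceval_step₂ empty_st SKIP 0 = empty_st := rfl
example : ceval_step₂ (X !→ 5) SKIP 1 = (X !→ 5) := rfl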
/-
Open Scope imp_scope.
Fixpoint ceval_step3 (st : state) (c : com) (i : nat)
: option state :=
match i with
| O ⇒ None
| S i' ⇒
match c with
| SKIP ⇒
Some st
| l ::= a1 ⇒
Some (l !-> aeval st a1 ; st)
| c1 ;; c2 ⇒
match (ceval_step3 st c1 i') with
| Some st' ⇒ ceval_step3 st' c2 i'
| None ⇒ None
end
| TEST b THEN c1 ELSE c2 FI ⇒
if (beval st b)
then ceval_step3 st c1 i'
else ceval_step3 st c2 i'
| WHILE b1 DO c1 END ⇒
if (beval st b1)
then match (ceval_step3 st c1 i') with
| Some st' ⇒ ceval_step3 st' c i'
| None ⇒ None
end
else Some st
end
end.
Close Scope imp_scope.
-/
def ceval_step₃ : imp.state → com → ℕ → option imp.state
| st c 0 := none
| st SKIP (succ i) := some st
| st (l ::= a₁) (succ i) := some $ l !→ aeval st a₁ ; st
| st (c₁ ;; c₂) (succ i) :=
match ceval_step₃ st c₁ i with
| some st' := ceval_step₃ st' c₂ i
| none := none
end
| st (TEST b THEN c₁ ELSE c₂ FI) (succ i) :=
if beval st b
then ceval_step₃ st c₁ i
else ceval_step₃ st c₂ i
| st (WHILE b DO c END) (succ i) :=
if beval st b
then
match ceval_step₃ st c i with
| some st' := ceval_step₃ st' c i
| none := none
end
else st
/-
Notation "'LETOPT' x <== e1 'IN' e2"
:= (match e1 with
| Some x ⇒ e2
| None ⇒ None
end)
(right associativity, at level 60).
Open Scope imp_scope.
Fixpoint ceval_step (st : state) (c : com) (i : nat)
: option state :=
match i with
| O ⇒ None
| S i' ⇒
match c with
| SKIP ⇒
Some st
| l ::= a1 ⇒
Some (l !-> aeval st a1 ; st)
| c1 ;; c2 ⇒
LETOPT st' <== ceval_step st c1 i' IN
ceval_step st' c2 i'
| TEST b THEN c1 ELSE c2 FI ⇒
if (beval st b)
then ceval_step st c1 i'
else ceval_step st c2 i'
| WHILE b1 DO c1 END ⇒
if (beval st b1)
then LETOPT st' <== ceval_step st c1 i' IN
ceval_step st' c i'
else Some st
end
end.
Close Scope imp_scope.
Definition test_ceval (st:state) (c:com) :=
match ceval_step st c 500 with
| None ⇒ None
| Some st ⇒ Some (st X, st Y, st Z)
end.
(* Compute
(test_ceval empty_st
(X ::= 2;;
TEST (X <= 1)
THEN Y ::= 3
ELSE Z ::= 4
FI)).
====>
Some (2, 0, 4) *)
-/
/- nah, not porting the LETOPT notation; Lean's `do` covers it -/
/- but the single outer `match` is better than one equation per command -/
def ceval_step : imp.state → com → ℕ → option imp.state
| st c 0 := none
| st c (succ i) :=
match c with
| SKIP := some st
| l ::= a₁ := some $ l !→ aeval st a₁ ; st
| c₁ ;; c₂ := do st' ← ceval_step st c₁ i, ceval_step st' c₂ i
| TEST b THEN c₁ ELSE c₂ FI :=
if beval st b
then ceval_step st c₁ i
else ceval_step st c₂ i
| WHILE b DO c₁ END :=
if beval st b
then do st' ← ceval_step st c₁ i, ceval_step st' c i
else st
end
def test_ceval (st c) :=
do st ← ceval_step st c 500, pure (st X, st Y, st Z)
#eval test_ceval empty_st $
X ::= 2;;
TEST X ≤' 1
THEN Y ::= 3
ELSE Z ::= 4
FI
/-
Definition pup_to_n : com
(* REPLACE THIS LINE WITH ":= _your_definition_ ." *). Admitted.
(*
Example pup_to_n_1 :
test_ceval (X !-> 5) pup_to_n
= Some (0, 15, 0).
Proof. reflexivity. Qed.
*)
-/
def pup_to_n' : com :=
Y ::= 0;;
WHILE ¬X == 0 DO
Y ::= Y + X;;
X ::= X - 1
END
example : test_ceval (X !→ 5) pup_to_n' = some (0, 15, 0) := rfl
def is_even : com :=
WHILE 2 ≤' X DO X ::= X - 2 END;;
TEST X == 0 THEN Z ::= 0 ELSE Z ::= 1 FI
example : test_ceval (X !→ 5) is_even = some (1, 0, 1) := rfl
example : test_ceval (X !→ 10) is_even = some (0, 0, 0) := rfl
/-
Theorem ceval_step__ceval: ∀c st st',
(∃i, ceval_step st c i = Some st') →
st =[ c ]⇒ st'.
Proof.
intros c st st' H.
inversion H as [i E].
clear H.
generalize dependent st'.
generalize dependent st.
generalize dependent c.
induction i as [| i' ].
- (* i = 0 -- contradictory *)
intros c st st' H. discriminate H.
- (* i = S i' *)
intros c st st' H.
destruct c;
simpl in H; inversion H; subst; clear H.
+ (* SKIP *) apply E_Skip.
+ (* ::= *) apply E_Ass. reflexivity.
+ (* ;; *)
destruct (ceval_step st c1 i') eqn:Heqr1.
* (* Evaluation of r1 terminates normally *)
apply E_Seq with s.
apply IHi'. rewrite Heqr1. reflexivity.
apply IHi'. simpl in H1. assumption.
* (* Otherwise -- contradiction *)
discriminate H1.
+ (* TEST *)
destruct (beval st b) eqn:Heqr.
* (* r = true *)
apply E_IfTrue. rewrite Heqr. reflexivity.
apply IHi'. assumption.
* (* r = false *)
apply E_IfFalse. rewrite Heqr. reflexivity.
apply IHi'. assumption.
+ (* WHILE *) destruct (beval st b) eqn :Heqr.
* (* r = true *)
destruct (ceval_step st c i') eqn:Heqr1.
{ (* r1 = Some s *)
apply E_WhileTrue with s. rewrite Heqr.
reflexivity.
apply IHi'. rewrite Heqr1. reflexivity.
apply IHi'. simpl in H1. assumption. }
{ (* r1 = None *) discriminate H1. }
* (* r = false *)
injection H1. intros H2. rewrite <- H2.
apply E_WhileFalse. apply Heqr. Qed.
-/
open imp.com imp.ceval
theorem ceval_step__ceval {c st st'} (h : ∃i, ceval_step st c i = some st')
: st =[ c ]⇒ st' :=
begin
cases h with i h,
induction i with i ih generalizing c st st',
cases h,
cases c; simp [ceval_step] at h,
case CSkip {
subst h,
exact E_Skip st,
},
case CAss : x a {
subst h,
apply E_Ass,
refl,
},
case CSeq : c₁ c₂ {
cases h with a h,
exact E_Seq (ih h.left) (ih h.right),
},
case CIf : b c₁ c₂ {
cases heq: beval st b; rw heq at h; simp at h,
exact E_IfFalse c₁ heq (ih h),
exact E_IfTrue c₂ heq (ih h),
},
case CWhile : b c {
cases heq: beval st b; rw heq at h; simp at h,
cases h,
exact E_WhileFalse c heq,
cases h with a h,
exact E_WhileTrue heq (ih h.left) (ih h.right),
},
end
/-
Theorem ceval_step_more: ∀i1 i2 st st' c,
i1 ≤ i2 →
ceval_step st c i1 = Some st' →
ceval_step st c i2 = Some st'.
Proof.
induction i1 as [|i1']; intros i2 st st' c Hle Hceval.
- (* i1 = 0 *)
simpl in Hceval. discriminate Hceval.
- (* i1 = S i1' *)
destruct i2 as [|i2']. inversion Hle.
assert (Hle': i1' ≤ i2') by omega.
destruct c.
+ (* SKIP *)
simpl in Hceval. inversion Hceval.
reflexivity.
+ (* ::= *)
simpl in Hceval. inversion Hceval.
reflexivity.
+ (* ;; *)
simpl in Hceval. simpl.
destruct (ceval_step st c1 i1') eqn:Heqst1'o.
* (* st1'o = Some *)
apply (IHi1' i2') in Heqst1'o; try assumption.
rewrite Heqst1'o. simpl. simpl in Hceval.
apply (IHi1' i2') in Hceval; try assumption.
* (* st1'o = None *)
discriminate Hceval.
+ (* TEST *)
simpl in Hceval. simpl.
destruct (beval st b); apply (IHi1' i2') in Hceval;
assumption.
+ (* WHILE *)
simpl in Hceval. simpl.
destruct (beval st b); try assumption.
destruct (ceval_step st c i1') eqn: Heqst1'o.
* (* st1'o = Some *)
apply (IHi1' i2') in Heqst1'o; try assumption.
rewrite → Heqst1'o. simpl. simpl in Hceval.
apply (IHi1' i2') in Hceval; try assumption.
* (* i1'o = None *)
simpl in Hceval. discriminate Hceval. Qed.
-/
theorem ceval_step_more {i₁ i₂ st st' c}
(hl : i₁ ≤ i₂) (h: ceval_step st c i₁ = some st')
: ceval_step st c i₂ = some st' :=
begin
induction i₁ with i₁ ih generalizing i₂ st st' c,
unfold ceval_step at h,
cases h,
cases i₂,
cases hl,
/- omega failed here (yikes) -/
have hl, exact le_of_succ_le_succ hl,
cases c,
case CSkip {
cases h,
unfold ceval_step,
},
case CAss : x a {
unfold ceval_step at *,
assumption,
},
case CSeq : c₁ c₂ {
unfold ceval_step at *,
cases h₁ : ceval_step st c₁ i₁ with st'',
simp only [ceval_step, h₁] at h,
contradiction,
simp only [h₁, option.some_bind] at h,
simp only [ih hl h₁, ih hl h, option.some_bind],
},
case CIf : b c₁ c₂ {
unfold ceval_step at *,
cases beval st b; simp at *; exact ih hl h,
},
case CWhile : b c {
unfold ceval_step at *,
cases beval st b; simp at *,
exact h,
cases h with a h,
exact ⟨a, ih hl h.left, ih hl h.right⟩,
},
end
/-
Theorem ceval__ceval_step: ∀c st st',
st =[ c ]⇒ st' →
∃i, ceval_step st c i = Some st'.
Proof.
intros c st st' Hce.
induction Hce.
(* FILL IN HERE *) Admitted.
-/
lemma le_max (n m : ℕ) : n ≤ max n m ∧ m ≤ max n m :=
⟨le_max_left n m, le_max_right n m⟩
theorem ceval__ceval_step {c st st'} (h : st =[ c ]⇒ st')
: ∃i, ceval_step st c i = some st' :=
begin
induction h,
case E_Skip { exact ⟨1, rfl⟩, },
case E_Ass : st a n x h {
exact ⟨1, by simp only [ceval_step, h]⟩,
},
case E_Seq : c₁ c₂ st'' st''' st'''' h₁ h₂ ih₁ ih₂ {
cases ih₁ with i₁ ih₁,
cases ih₂ with i₂ ih₂,
exact ⟨max i₁ i₂ + 1, by {
unfold ceval_step,
have hl, exact le_max i₁ i₂,
simp [ceval_step_more hl.left ih₁, ceval_step_more hl.right ih₂],
}⟩,
},
case E_IfTrue : st'' st''' b c₁ c₂ h₁ h₂ ih {
cases ih with i ih,
exact ⟨i + 1, by {
unfold ceval_step,
simp [h₁, ih],
}⟩,
},
case E_IfFalse : st'' st''' b c₁ c₂ h₁ h₂ ih {
cases ih with i ih,
exact ⟨i + 1, by {
unfold ceval_step,
simp [h₁, ih],
}⟩,
},
case E_WhileFalse : b st'' c h {
exact ⟨1, by {
unfold ceval_step,
simp [h],
refl,
}⟩,
},
case E_WhileTrue : st'' st''' st'''' b c hb h₂ h₃ ih₁ ih₂ {
cases ih₁ with i₁ ih₁,
cases ih₂ with i₂ ih₂,
exact ⟨max i₁ i₂ + 1, by {
unfold ceval_step,
simp [hb],
exact ⟨st''', by {
have hl, exact le_max i₁ i₂,
exact ⟨ceval_step_more hl.left ih₁, ceval_step_more hl.right ih₂⟩,
}⟩,
}⟩,
},
end
/-
Theorem ceval_and_ceval_step_coincide: ∀c st st',
st =[ c ]⇒ st'
↔ ∃i, ceval_step st c i = Some st'.
Proof.
intros c st st'.
split. apply ceval__ceval_step. apply ceval_step__ceval.
Qed.
-/
theorem ceval_and_ceval_step_coincide (c st st')
: (st =[ c ]⇒ st') ↔ ∃i, ceval_step st c i = some st' :=
⟨ceval__ceval_step, ceval_step__ceval⟩
/-
Theorem ceval_deterministic' : ∀c st st1 st2,
st =[ c ]⇒ st1 →
st =[ c ]⇒ st2 →
st1 = st2.
Proof.
intros c st st1 st2 He1 He2.
apply ceval__ceval_step in He1.
apply ceval__ceval_step in He2.
inversion He1 as [i1 E1].
inversion He2 as [i2 E2].
apply ceval_step_more with (i2 := i1 + i2) in E1.
apply ceval_step_more with (i2 := i1 + i2) in E2.
rewrite E1 in E2. inversion E2. reflexivity.
omega. omega. Qed.
-/
theorem ceval_deterministic' {c st st₁ st₂}
(h₁ : st =[ c ]⇒ st₁) (h₂ : st =[ c ]⇒ st₂) : st₁ = st₂ :=
begin
cases ceval__ceval_step h₁ with i₁ h₁,
cases ceval__ceval_step h₂ with i₂ h₂,
replace h₂, exact ceval_step_more (le_max i₁ i₂).right h₂,
rw ceval_step_more (le_max i₁ i₂).left h₁ at h₂,
injection h₂,
end
|
{
"alphanum_fraction": null,
"author": "michens",
"avg_line_length": null,
"converted": null,
"ext": null,
"file": null,
"hexsha": null,
"include": null,
"lang": null,
"length": null,
"llama_tokens": null,
"mathlib_filename": null,
"max_forks_count": null,
"max_forks_repo_forks_event_max_datetime": null,
"max_forks_repo_forks_event_min_datetime": null,
"max_forks_repo_head_hexsha": null,
"max_forks_repo_licenses": null,
"max_forks_repo_name": null,
"max_forks_repo_path": null,
"max_issues_count": null,
"max_issues_repo_head_hexsha": null,
"max_issues_repo_issues_event_max_datetime": null,
"max_issues_repo_issues_event_min_datetime": null,
"max_issues_repo_licenses": null,
"max_issues_repo_name": null,
"max_issues_repo_path": null,
"max_line_length": null,
"max_stars_count": null,
"max_stars_repo_head_hexsha": null,
"max_stars_repo_licenses": null,
"max_stars_repo_name": null,
"max_stars_repo_path": null,
"max_stars_repo_stars_event_max_datetime": null,
"max_stars_repo_stars_event_min_datetime": null,
"num_tokens": null,
"path": "github-repos/lean/michens-learn-lean/learn-lean-f38fc342780ddff5a164a18e5482163dea506ccd/sf/v1/ch14_impcevalfun.lean",
"reason": null,
"repo": "learn-lean",
"save_path": "github-repos/lean/michens-learn-lean",
"sha": "f38fc342780ddff5a164a18e5482163dea506ccd",
"size": null
}
|
/-
Copyright (c) 2022 Damiano Testa. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Damiano Testa
-/
import data.polynomial.algebra_map
import ring_theory.localization.basic
/-! # Laurent polynomials
We introduce Laurent polynomials over a semiring `R`. Mathematically, they are expressions of the
form
$$
\sum_{i \in \mathbb{Z}} a_i T ^ i
$$
where the sum extends over a finite subset of `ℤ`. Thus, negative exponents are allowed. The
coefficients come from the semiring `R` and the variable `T` commutes with everything.
Since we are going to convert back and forth between polynomials and Laurent polynomials, we
decided to maintain some distinction by using the symbol `T`, rather than `X`, as the variable for
Laurent polynomials.
## Notation
The symbol `R[T;T⁻¹]` stands for `laurent_polynomial R`. We also define
* `C : R →+* R[T;T⁻¹]` the inclusion of constant polynomials, analogous to the one for `R[X]`;
* `T : ℤ → R[T;T⁻¹]` the sequence of powers of the variable `T`.
## Implementation notes
We define Laurent polynomials as `add_monoid_algebra R ℤ`.
Thus, they are essentially `finsupp`s `ℤ →₀ R`.
This choice differs from the current irreducible design of `polynomial`, which instead hides the
implementation via `finsupp`s. It is closer to the original definition of polynomials.
As a consequence, `laurent_polynomial` plays well with polynomials, but there is a little roughness
in establishing the API, since the `finsupp` implementation of `R[X]` is well-shielded.
Unlike the case of polynomials, I felt that the exponent notation was not too easy to use, as only
natural exponents would be allowed. Moreover, in the end, it seems likely that we should aim to
perform computations on exponents in `ℤ` anyway and separating this via the symbol `T` seems
convenient.
I made *heavy* use of `simp` lemmas, aiming to bring Laurent polynomials to the form `C a * T n`.
Any comments or suggestions for improvements are greatly appreciated!
## Future work
Lots is missing!
-- (Riccardo) add inclusion into Laurent series.
-- (Riccardo) giving a morphism (as `R`-alg, so in the commutative case)
from `R[T;T⁻¹]` to `S` is the same as choosing a unit of `S`.
-- A "better" definition of `trunc` would be as an `R`-linear map. This works:
-- ```
-- def trunc : R[T;T⁻¹] →[R] R[X] :=
-- begin
-- refine (_ : add_monoid_algebra R ℕ →[R] R[X]).comp _,
-- { exact ⟨(to_finsupp_iso R).symm, by simp⟩ },
-- { refine ⟨λ r, comap_domain _ r (set.inj_on_of_injective (λ a b ab, int.of_nat.inj ab) _), _⟩,
-- exact λ r f, comap_domain_smul _ _ _ }
-- end
-- ```
-- but it would make sense to bundle the maps better, for a smoother user experience.
-- I (DT) did not have the strength to embark on this (possibly short!) journey, after getting to
-- this stage of the Laurent process!
-- This would likely involve adding a `comap_domain` analogue of
-- `add_monoid_algebra.map_domain_alg_hom` and an `R`-linear version of
-- `polynomial.to_finsupp_iso`.
-- Add `degree, int_degree, int_trailing_degree, leading_coeff, trailing_coeff,...`.
-/
open_locale polynomial big_operators
open polynomial add_monoid_algebra finsupp
noncomputable theory
variables {R : Type*}
/-- The semiring of Laurent polynomials with coefficients in the semiring `R`.
We denote it by `R[T;T⁻¹]`.
The ring homomorphism `C : R →+* R[T;T⁻¹]` includes `R` as the constant polynomials. -/
abbreviation laurent_polynomial (R : Type*) [semiring R] := add_monoid_algebra R ℤ
local notation R`[T;T⁻¹]`:9000 := laurent_polynomial R
/-- The ring homomorphism, taking a polynomial with coefficients in `R` to a Laurent polynomial
with coefficients in `R`. -/
def polynomial.to_laurent [semiring R] : R[X] →+* R[T;T⁻¹] :=
(map_domain_ring_hom R int.of_nat_hom).comp (to_finsupp_iso R)
/-- This is not a simp lemma, as it is usually preferable to use the lemmas about `C` and `X`
instead. -/
lemma polynomial.to_laurent_apply [semiring R] (p : R[X]) :
p.to_laurent = p.to_finsupp.map_domain coe := rfl
/-- The `R`-algebra map, taking a polynomial with coefficients in `R` to a Laurent polynomial
with coefficients in `R`. -/
def polynomial.to_laurent_alg [comm_semiring R] :
R[X] →ₐ[R] R[T;T⁻¹] :=
begin
refine alg_hom.comp _ (to_finsupp_iso_alg R).to_alg_hom,
exact (map_domain_alg_hom R R int.of_nat_hom),
end
@[simp]
lemma polynomial.to_laurent_alg_apply [comm_semiring R] (f : R[X]) :
f.to_laurent_alg = f.to_laurent := rfl
namespace laurent_polynomial
section semiring
variables [semiring R]
lemma single_zero_one_eq_one : (single 0 1 : R[T;T⁻¹]) = (1 : R[T;T⁻¹]) := rfl
/-! ### The functions `C` and `T`. -/
/-- The ring homomorphism `C`, including `R` into the ring of Laurent polynomials over `R` as
the constant Laurent polynomials. -/
def C : R →+* R[T;T⁻¹] :=
single_zero_ring_hom
lemma algebra_map_apply {R A : Type*} [comm_semiring R] [semiring A] [algebra R A] (r : R) :
algebra_map R (laurent_polynomial A) r = C (algebra_map R A r) :=
rfl
/--
When we have `[comm_semiring R]`, the function `C` is the same as `algebra_map R R[T;T⁻¹]`.
(But note that `C` is defined when `R` is not necessarily commutative, in which case
`algebra_map` is not available.)
-/
lemma C_eq_algebra_map {R : Type*} [comm_semiring R] (r : R) :
C r = algebra_map R R[T;T⁻¹] r :=
rfl
lemma single_eq_C (r : R) : single 0 r = C r := rfl
/-- The function `n ↦ T ^ n`, implemented as a sequence `ℤ → R[T;T⁻¹]`.
Using directly `T ^ n` does not work, since we want the exponents to be of type `ℤ` and there
is no `ℤ`-power defined on `R[T;T⁻¹]`. Using that `T` is a unit introduces extra coercions.
For these reasons, the definition of `T` is as a sequence. -/
def T (n : ℤ) : R[T;T⁻¹] := single n 1
@[simp]
lemma T_zero : (T 0 : R[T;T⁻¹]) = 1 := rfl
lemma T_add (m n : ℤ) : (T (m + n) : R[T;T⁻¹]) = T m * T n :=
by { convert single_mul_single.symm, simp [T] }
lemma T_sub (m n : ℤ) : (T (m - n) : R[T;T⁻¹]) = T m * T (-n) :=
by rw [← T_add, sub_eq_add_neg]
@[simp]
lemma T_pow (m : ℤ) (n : ℕ) : (T m ^ n : R[T;T⁻¹]) = T (n * m) :=
by rw [T, T, single_pow n, one_pow, nsmul_eq_mul]
/-- The `simp` version of `mul_assoc`, in the presence of `T`'s. -/
@[simp]
lemma mul_T_assoc (f : R[T;T⁻¹]) (m n : ℤ) : f * T m * T n = f * T (m + n) :=
by simp [← T_add, mul_assoc]
@[simp]
lemma single_eq_C_mul_T (r : R) (n : ℤ) :
(single n r : R[T;T⁻¹]) = (C r * T n : R[T;T⁻¹]) :=
by convert single_mul_single.symm; simp
-- This lemma locks in the right changes and is what Lean proved directly.
-- The actual `simp`-normal form of a Laurent monomial is `C a * T n`, whenever it can be reached.
@[simp]
lemma _root_.polynomial.to_laurent_C_mul_T (n : ℕ) (r : R) :
((polynomial.monomial n r).to_laurent : R[T;T⁻¹]) = C r * T n :=
show map_domain coe (monomial n r).to_finsupp = (C r * T n : R[T;T⁻¹]),
by rw [to_finsupp_monomial, map_domain_single, single_eq_C_mul_T]
@[simp]
lemma _root_.polynomial.to_laurent_C (r : R) : (polynomial.C r).to_laurent = C r :=
begin
convert polynomial.to_laurent_C_mul_T 0 r,
simp only [int.coe_nat_zero, T_zero, mul_one],
end
@[simp]
lemma _root_.polynomial.to_laurent_X : (polynomial.X.to_laurent : R[T;T⁻¹]) = T 1 :=
begin
have : (polynomial.X : R[X]) = monomial 1 1,
{ simp [← C_mul_X_pow_eq_monomial] },
simp [this, polynomial.to_laurent_C_mul_T],
end
@[simp] lemma _root_.polynomial.to_laurent_one : (polynomial.to_laurent : R[X] → R[T;T⁻¹]) 1 = 1 :=
map_one polynomial.to_laurent
@[simp]
lemma _root_.polynomial.to_laurent_C_mul_eq (r : R) (f : R[X]) :
(polynomial.C r * f).to_laurent = C r * f.to_laurent :=
by simp only [_root_.map_mul, polynomial.to_laurent_C]
@[simp]
lemma _root_.polynomial.to_laurent_X_pow (n : ℕ) : (X ^ n : R[X]).to_laurent = T n :=
by simp only [map_pow, polynomial.to_laurent_X, T_pow, mul_one]
@[simp]
lemma _root_.polynomial.to_laurent_C_mul_X_pow (n : ℕ) (r : R) :
(polynomial.C r * X ^ n).to_laurent = C r * T n :=
by simp only [_root_.map_mul, polynomial.to_laurent_C, polynomial.to_laurent_X_pow]
instance invertible_T (n : ℤ) : invertible (T n : R[T;T⁻¹]) :=
{ inv_of := T (- n),
inv_of_mul_self := by rw [← T_add, add_left_neg, T_zero],
mul_inv_of_self := by rw [← T_add, add_right_neg, T_zero] }
@[simp]
lemma inv_of_T (n : ℤ) : ⅟ (T n : R[T;T⁻¹]) = T (- n) := rfl
lemma is_unit_T (n : ℤ) : is_unit (T n : R[T;T⁻¹]) :=
is_unit_of_invertible _
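-- A quick consequence (a sketch): the inverse supplied by `invertible_T` is
-- definitionally `T (-n)`, so opposite powers of `T` cancel.
example (n : ℤ) : (T n * T (-n) : R[T;T⁻¹]) = 1 := mul_inv_of_self (T n)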
@[elab_as_eliminator] protected lemma induction_on {M : R[T;T⁻¹] → Prop} (p : R[T;T⁻¹])
(h_C : ∀ a, M (C a))
(h_add : ∀ {p q}, M p → M q → M (p + q))
(h_C_mul_T : ∀ (n : ℕ) (a : R), M (C a * T n) → M (C a * T (n + 1)))
(h_C_mul_T_Z : ∀ (n : ℕ) (a : R), M (C a * T (- n)) → M (C a * T (- n - 1))) :
M p :=
begin
have A : ∀ {n : ℤ} {a : R}, M (C a * T n),
{ assume n a,
apply n.induction_on,
{ simpa only [T_zero, mul_one] using h_C a },
{ exact λ m, h_C_mul_T m a },
{ exact λ m, h_C_mul_T_Z m a } },
have B : ∀ (s : finset ℤ), M (s.sum (λ (n : ℤ), C (p.to_fun n) * T n)),
{ apply finset.induction,
{ convert h_C 0, simp only [finset.sum_empty, _root_.map_zero] },
{ assume n s ns ih, rw finset.sum_insert ns, exact h_add A ih } },
convert B p.support,
ext a,
simp_rw [← single_eq_C_mul_T, finset.sum_apply', single_apply, finset.sum_ite_eq'],
split_ifs with h h,
{ refl },
{ exact finsupp.not_mem_support_iff.mp h }
end
/-- To prove something about Laurent polynomials, it suffices to show that
* the condition is closed under taking sums, and
* it holds for monomials.
-/
@[elab_as_eliminator] protected lemma induction_on' {M : R[T;T⁻¹] → Prop} (p : R[T;T⁻¹])
(h_add : ∀p q, M p → M q → M (p + q))
(h_C_mul_T : ∀(n : ℤ) (a : R), M (C a * T n)) :
M p :=
begin
refine p.induction_on (λ a, _) h_add _ _;
try { exact λ n f _, h_C_mul_T _ f },
convert h_C_mul_T 0 a,
exact (mul_one _).symm,
end
lemma commute_T (n : ℤ) (f : R[T;T⁻¹]) : commute (T n) f :=
f.induction_on' (λ p q Tp Tq, commute.add_right Tp Tq) $ λ m a,
show T n * _ = _, by
{ rw [T, T, ← single_eq_C, single_mul_single, single_mul_single, single_mul_single],
simp [add_comm] }
@[simp]
lemma T_mul (n : ℤ) (f : R[T;T⁻¹]) : T n * f = f * T n :=
(commute_T n f).eq
/-- `trunc : R[T;T⁻¹] →+ R[X]` maps a Laurent polynomial `f` to the polynomial whose terms of
nonnegative degree coincide with the ones of `f`. The terms of negative degree of `f` "vanish".
`trunc` is a left-inverse to `polynomial.to_laurent`. -/
def trunc : R[T;T⁻¹] →+ R[X] :=
((to_finsupp_iso R).symm.to_add_monoid_hom).comp $
comap_domain.add_monoid_hom $ λ a b, int.of_nat.inj
@[simp]
lemma trunc_C_mul_T (n : ℤ) (r : R) : trunc (C r * T n) = ite (0 ≤ n) (monomial n.to_nat r) 0 :=
begin
apply (to_finsupp_iso R).injective,
rw [← single_eq_C_mul_T, trunc, add_monoid_hom.coe_comp, function.comp_app,
comap_domain.add_monoid_hom_apply, to_finsupp_iso_apply],
by_cases n0 : 0 ≤ n,
{ lift n to ℕ using n0,
erw [comap_domain_single, to_finsupp_iso_symm_apply],
simp only [int.coe_nat_nonneg, int.to_nat_coe_nat, if_true, to_finsupp_iso_apply,
to_finsupp_monomial] },
{ lift (- n) to ℕ using (neg_pos.mpr (not_le.mp n0)).le with m,
rw [to_finsupp_iso_apply, to_finsupp_inj, if_neg n0],
erw to_finsupp_iso_symm_apply,
ext a,
have := ((not_le.mp n0).trans_le (int.coe_zero_le a)).ne',
simp only [coeff, comap_domain_apply, int.of_nat_eq_coe, coeff_zero, single_apply_eq_zero, this,
is_empty.forall_iff] }
end
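-- For instance (a sketch; `norm_num` is one way to discharge the `0 ≤ -1`
-- side condition), a term of negative degree is truncated away:
example (r : R) : trunc (C r * T (-1)) = 0 :=
by { rw trunc_C_mul_T, norm_num }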
@[simp] lemma left_inverse_trunc_to_laurent :
function.left_inverse (trunc : R[T;T⁻¹] → R[X]) polynomial.to_laurent :=
begin
refine λ f, f.induction_on' _ _,
{ exact λ f g hf hg, by simp only [hf, hg, _root_.map_add] },
{ exact λ n r, by simp only [polynomial.to_laurent_C_mul_T, trunc_C_mul_T, int.coe_nat_nonneg,
int.to_nat_coe_nat, if_true] }
end
@[simp] lemma _root_.polynomial.trunc_to_laurent (f : R[X]) : trunc f.to_laurent = f :=
left_inverse_trunc_to_laurent _
lemma _root_.polynomial.to_laurent_injective :
function.injective (polynomial.to_laurent : R[X] → R[T;T⁻¹]) :=
left_inverse_trunc_to_laurent.injective
@[simp] lemma _root_.polynomial.to_laurent_inj (f g : R[X]) :
f.to_laurent = g.to_laurent ↔ f = g :=
⟨λ h, polynomial.to_laurent_injective h, congr_arg _⟩
lemma _root_.polynomial.to_laurent_ne_zero {f : R[X]} :
f ≠ 0 ↔ f.to_laurent ≠ 0 :=
(map_ne_zero_iff _ (by exact polynomial.to_laurent_injective)).symm
lemma exists_T_pow (f : R[T;T⁻¹]) :
∃ (n : ℕ) (f' : R[X]), f'.to_laurent = f * T n :=
begin
apply f.induction_on' _ (λ n a, _); clear f,
{ rintros f g ⟨m, fn, hf⟩ ⟨n, gn, hg⟩,
refine ⟨m + n, fn * X ^ n + gn * X ^ m, _⟩,
simp only [hf, hg, add_mul, add_comm (n : ℤ), map_add, map_mul, polynomial.to_laurent_X_pow,
mul_T_assoc, int.coe_nat_add] },
{ cases n with n n,
{ exact ⟨0, polynomial.C a * X ^ n, by simp⟩ },
{ refine ⟨n + 1, polynomial.C a, _⟩,
simp only [int.neg_succ_of_nat_eq, polynomial.to_laurent_C, int.coe_nat_succ, mul_T_assoc,
add_left_neg, T_zero, mul_one] } }
end
/-- This is a version of `exists_T_pow` stated as an induction principle. -/
@[elab_as_eliminator] lemma induction_on_mul_T {Q : R[T;T⁻¹] → Prop} (f : R[T;T⁻¹])
(Qf : ∀ {f : R[X]} {n : ℕ}, Q (f.to_laurent * T (- n))) :
Q f :=
begin
rcases f.exists_T_pow with ⟨n, f', hf⟩,
rw [← mul_one f, ← T_zero, ← nat.cast_zero, ← nat.sub_self n, nat.cast_sub rfl.le, T_sub,
← mul_assoc, ← hf],
exact Qf,
end
/-- Suppose that `Q` is a statement about Laurent polynomials such that
* `Q` is true on *ordinary* polynomials;
* `Q (f * T 1)` implies `Q f`;
it follows that `Q` is true on all Laurent polynomials. -/
lemma reduce_to_polynomial_of_mul_T (f : R[T;T⁻¹]) {Q : R[T;T⁻¹] → Prop}
(Qf : ∀ (f : R[X]), Q f.to_laurent)
(QT : ∀ f, Q (f * T 1) → Q f) :
Q f :=
begin
induction f using laurent_polynomial.induction_on_mul_T with f n,
induction n with n hn,
{ simpa only [int.coe_nat_zero, neg_zero, T_zero, mul_one] using Qf _ },
{ convert QT _ _,
simpa using hn }
end
section support
lemma support_C_mul_T (a : R) (n : ℤ) : (C a * T n).support ⊆ {n} :=
by simpa only [← single_eq_C_mul_T] using support_single_subset
lemma support_C_mul_T_of_ne_zero {a : R} (a0 : a ≠ 0) (n : ℤ) : (C a * T n).support = {n} :=
begin
rw ← single_eq_C_mul_T,
exact support_single_ne_zero _ a0,
end
/-- The support of a polynomial `f` is a finset in `ℕ`. The lemma `to_laurent_support f`
shows that the support of `f.to_laurent` is the same finset, but viewed in `ℤ` under the natural
inclusion `ℕ ↪ ℤ`. -/
lemma to_laurent_support (f : R[X]) :
f.to_laurent.support = f.support.map nat.cast_embedding :=
begin
generalize' hd : f.support = s,
revert f,
refine finset.induction_on s _ _; clear s,
{ simp only [polynomial.support_eq_empty, map_zero, finsupp.support_zero, eq_self_iff_true,
implies_true_iff, finset.map_empty] {contextual := tt} },
{ intros a s as hf f fs,
have : (erase a f).to_laurent.support = s.map nat.cast_embedding := hf (f.erase a) (by simp only
[fs, finset.erase_eq_of_not_mem as, polynomial.support_erase, finset.erase_insert_eq_erase]),
rw [← monomial_add_erase f a, finset.map_insert, ← this, map_add,
polynomial.to_laurent_C_mul_T, support_add_eq, finset.insert_eq],
{ congr,
exact support_C_mul_T_of_ne_zero (polynomial.mem_support_iff.mp (by simp [fs])) _ },
{ rw this,
exact disjoint.mono_left (support_C_mul_T _ _) (by simpa) } }
end
end support
section degrees
/-- The degree of a Laurent polynomial takes values in `with_bot ℤ`.
If `f : R[T;T⁻¹]` is a Laurent polynomial, then `f.degree` is the maximum of the support of `f`,
or `⊥` if `f = 0`. -/
def degree (f : R[T;T⁻¹]) : with_bot ℤ := f.support.max
@[simp] lemma degree_zero : degree (0 : R[T;T⁻¹]) = ⊥ := rfl
section exact_degrees
open_locale classical
@[simp] lemma degree_C_mul_T (n : ℤ) (a : R) (a0 : a ≠ 0) : (C a * T n).degree = n :=
begin
rw degree,
convert finset.max_singleton,
refine support_eq_singleton.mpr _,
simp only [← single_eq_C_mul_T, single_eq_same, a0, ne.def, not_false_iff, eq_self_iff_true,
and_self],
end
lemma degree_C_mul_T_ite (n : ℤ) (a : R) : (C a * T n).degree = ite (a = 0) ⊥ n :=
by split_ifs with h h;
simp only [h, map_zero, zero_mul, degree_zero, degree_C_mul_T, ne.def, not_false_iff]
@[simp] lemma degree_T [nontrivial R] (n : ℤ) : (T n : R[T;T⁻¹]).degree = n :=
begin
rw [← one_mul (T n), ← map_one C],
exact degree_C_mul_T n 1 (one_ne_zero : (1 : R) ≠ 0),
end
lemma degree_C {a : R} (a0 : a ≠ 0) : (C a).degree = 0 :=
begin
rw [← mul_one (C a), ← T_zero],
exact degree_C_mul_T 0 a a0
end
lemma degree_C_ite (a : R) : (C a).degree = ite (a = 0) ⊥ 0 :=
by split_ifs with h h;
simp only [h, map_zero, degree_zero, degree_C, ne.def, not_false_iff]
end exact_degrees
section degree_bounds
lemma degree_C_mul_T_le (n : ℤ) (a : R) : (C a * T n).degree ≤ n :=
begin
by_cases a0 : a = 0,
{ simp only [a0, map_zero, zero_mul, degree_zero, bot_le] },
{ exact (degree_C_mul_T n a a0).le }
end
lemma degree_T_le (n : ℤ) : (T n : R[T;T⁻¹]).degree ≤ n :=
(le_of_eq (by rw [map_one, one_mul])).trans (degree_C_mul_T_le n (1 : R))
lemma degree_C_le (a : R) : (C a).degree ≤ 0 :=
(le_of_eq (by rw [T_zero, mul_one])).trans (degree_C_mul_T_le 0 a)
end degree_bounds
end degrees
instance : module R[X] R[T;T⁻¹] :=
module.comp_hom _ polynomial.to_laurent
instance (R : Type*) [semiring R] : is_scalar_tower R[X] R[X] R[T;T⁻¹] :=
{ smul_assoc := λ x y z, by simp only [has_smul.smul, has_smul.comp.smul, map_mul, mul_assoc] }
end semiring
section comm_semiring
variable [comm_semiring R]
instance algebra_polynomial (R : Type*) [comm_semiring R] : algebra R[X] R[T;T⁻¹] :=
{ commutes' := λ f l, by simp [mul_comm],
smul_def' := λ f l, rfl,
.. polynomial.to_laurent }
lemma algebra_map_X_pow (n : ℕ) : algebra_map R[X] R[T;T⁻¹] (X ^ n) = T n :=
polynomial.to_laurent_X_pow n
@[simp]
lemma algebra_map_eq_to_laurent (f : R[X]) : algebra_map R[X] R[T;T⁻¹] f = f.to_laurent :=
rfl
lemma is_localization : is_localization (submonoid.closure ({X} : set R[X])) R[T;T⁻¹] :=
{ map_units := λ t, begin
cases t with t ht,
rcases submonoid.mem_closure_singleton.mp ht with ⟨n, rfl⟩,
simp only [is_unit_T n, set_like.coe_mk, algebra_map_eq_to_laurent, polynomial.to_laurent_X_pow]
end,
surj := λ f, begin
induction f using laurent_polynomial.induction_on_mul_T with f n,
have := (submonoid.closure ({X} : set R[X])).pow_mem submonoid.mem_closure_singleton_self n,
refine ⟨(f, ⟨_, this⟩), _⟩,
simp only [set_like.coe_mk, algebra_map_eq_to_laurent, polynomial.to_laurent_X_pow, mul_T_assoc,
add_left_neg, T_zero, mul_one],
end,
eq_iff_exists := λ f g, begin
rw [algebra_map_eq_to_laurent, algebra_map_eq_to_laurent, polynomial.to_laurent_inj],
refine ⟨_, _⟩,
{ rintro rfl,
exact ⟨1, rfl⟩ },
{ rintro ⟨⟨h, hX⟩, h⟩,
rcases submonoid.mem_closure_singleton.mp hX with ⟨n, rfl⟩,
exact mul_X_pow_injective n h }
end }
end comm_semiring
end laurent_polynomial
|
{
"alphanum_fraction": null,
"author": "leanprover-community",
"avg_line_length": null,
"converted": null,
"ext": null,
"file": null,
"hexsha": null,
"include": null,
"lang": null,
"length": null,
"llama_tokens": null,
"mathlib_filename": null,
"max_forks_count": null,
"max_forks_repo_forks_event_max_datetime": null,
"max_forks_repo_forks_event_min_datetime": null,
"max_forks_repo_head_hexsha": null,
"max_forks_repo_licenses": null,
"max_forks_repo_name": null,
"max_forks_repo_path": null,
"max_issues_count": null,
"max_issues_repo_head_hexsha": null,
"max_issues_repo_issues_event_max_datetime": null,
"max_issues_repo_issues_event_min_datetime": null,
"max_issues_repo_licenses": null,
"max_issues_repo_name": null,
"max_issues_repo_path": null,
"max_line_length": null,
"max_stars_count": null,
"max_stars_repo_head_hexsha": null,
"max_stars_repo_licenses": null,
"max_stars_repo_name": null,
"max_stars_repo_path": null,
"max_stars_repo_stars_event_max_datetime": null,
"max_stars_repo_stars_event_min_datetime": null,
"num_tokens": null,
"path": "github-repos/lean/leanprover-community-mathlib/mathlib-5e526d18cea33550268dcbbddcb822d5cde40654/src/data/polynomial/laurent.lean",
"reason": null,
"repo": "mathlib",
"save_path": "github-repos/lean/leanprover-community-mathlib",
"sha": "5e526d18cea33550268dcbbddcb822d5cde40654",
"size": null
}
|
from collections import OrderedDict
from enum import Enum, unique
import numpy as np
from typing import Dict, Union, Iterator, Type, Tuple
from meio.gsm.dag_gsm import GuaranteedServiceModelDAG
from meio.gsm.tree_gsm import Stage, GuaranteedServiceModelTree, GuaranteedServiceModel
def create_supply_chain_network_from_iterator(supply_chain: Iterator) -> Dict[str, Stage]:
"""
    Method to read from an iterator and initialise the dictionary of stages forming the supply chain.
    The data file is a comma-separated txt with one stage per row and
    the following field names for columns:
- stage_id
- lead_time
- max_s_time
- cost_rate
- cap_constraint
- risk_pool
- ext_demand_mean
- ext_demand_std
- ext_demand_thres
- up_stages (up_stage_1,phi_1,up_stage_2,phi_2,...,up_stage_n,phi_n)
:returns: dictionary of stage objects with keys being the unique ids of the stages
"""
stage_configs = OrderedDict() # type: OrderedDict[str,Dict]
u_stages = {} # type: Dict[str,Dict[str,int]]
for i, row in enumerate(supply_chain):
if i == 0:
continue
line = row.strip("\n").split(",")
stage_config = {} # type: Dict[str, Union[str, int, float, Dict[str, int]]]
stage_config['_id'] = stage_id = line[0]
stage_config['lead_time'] = int(float(line[1]))
stage_config["max_s_time"] = int(line[2]) if line[2] != "" else np.inf
stage_config['added_cost'] = float(line[3])
if line[4] != "":
stage_config['risk_pool'] = int(line[4])
if line[5] != "":
stage_config['is_ext_demand_stage'] = True
stage_config['demand_mean'] = float(line[5])
stage_config['demand_std'] = float(line[6])
stage_config['demand_thres'] = float(line[7])
up_stages = {} # type: Dict[str, int]
up_stages_list = line[8:]
if len(up_stages_list) > 1:
for s in range(0, len(up_stages_list), 2):
up_stage_id = str(up_stages_list[s])
phi = int(up_stages_list[s+1])
up_stages[up_stage_id] = phi
stage_config["up_stages"] = u_stages[stage_id] = up_stages
stage_configs[stage_id] = stage_config
d_stages = {stage_id:{} for stage_id in u_stages} # type: Dict[str, Dict[str, int]]
for stage_id,up_stages in u_stages.items():
for up_stage_id,phi in up_stages.items():
d_stages[up_stage_id][stage_id] = phi
for stage_id in stage_configs:
stage_configs[stage_id]["up_stages"] = u_stages[stage_id]
stage_configs[stage_id]["down_stages"] = d_stages[stage_id]
stages = OrderedDict((stage_id,Stage(**stage_config)) for stage_id, stage_config
in stage_configs.items())
return stages
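# Example (a sketch; header and values are hypothetical, and the parser is purely
# positional: it reads line[4] as risk_pool, so no cap_constraint column appears
# here): a two-stage chain where "demand" faces external demand and is fed by
# "supply" with phi = 1.
#
#   rows = iter([
#       "stage_id,lead_time,max_s_time,cost_rate,risk_pool,"
#       "ext_demand_mean,ext_demand_std,ext_demand_thres,up_stages",
#       "supply,2,,1.0,,,,",
#       "demand,1,0,5.0,,100,10,0.95,supply,1",
#   ])
#   stages = create_supply_chain_network_from_iterator(rows)
#   # stages["demand"].up_stages == {"supply": 1}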
def read_supply_chain_from_txt(supply_chain_txt_file: str) -> Dict[str, Stage]:
"""
    Method to read from file and initialise the dictionary of stages forming the supply chain.
:returns: dictionary of stage objects with keys being the unique ids of the stages
"""
with open(supply_chain_txt_file, "r") as f:
stages = create_supply_chain_network_from_iterator(f)
return stages
@unique
class GSM(Enum):
Tree = 'Tree' # Spanning tree
CoC = 'CoC' # Clusters of commonality
DAG = 'DAG' # Directed Acyclic graphs
def create_gsm_instance(gsm_type: GSM, supply_chain_filename: str) \
-> Tuple[Dict[str, Stage], GuaranteedServiceModel]:
"""
    GSM factory method. It does not itself check that the given GSM type is compatible with
    the network topology described in the config file; any such check happens only as a
    side effect of constructing the Tree model.
:param gsm_type: The type of the GSM model (e.g. spanning tree or clusters of commonality).
:param supply_chain_filename: The name of the config file defining the topology and other
parameters.
:raise UnSupportedGSMException: Not all types of GSM can be created with this utility.
:raise IncompatibleGraphTopology: The specified type is inconsistent with the topology
described in the config file.
    :raise InconsistentGSMConfiguration: Network topology labels are not as expected.
    :return: The stages of the network and the gsm model of the appropriate type, if config is
compatible with what was asked for.
"""
stages = read_supply_chain_from_txt(supply_chain_filename)
creator = {
'Tree': GuaranteedServiceModelTree,
'DAG': GuaranteedServiceModelDAG
} # type: Dict[str, Type[GuaranteedServiceModel]]
return stages, creator[gsm_type.value](stages)
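# Usage sketch (the config filename is hypothetical):
#   stages, gsm = create_gsm_instance(GSM.Tree, "supply_chain.txt")
# Note that GSM.CoC has no entry in `creator`, so requesting it currently raises
# a KeyError rather than the UnSupportedGSMException named in the docstring.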
|
{
"alphanum_fraction": 0.6679357022,
"author": null,
"avg_line_length": 41.1130434783,
"converted": null,
"ext": "py",
"file": null,
"hexsha": "5bf5139bc1a0e62f24a0c6dbd55e68e2b46b69f6",
"include": true,
"lang": "Python",
"length": null,
"llama_tokens": null,
"mathlib_filename": null,
"max_forks_count": 2,
"max_forks_repo_forks_event_max_datetime": "2021-04-19T09:01:12.000Z",
"max_forks_repo_forks_event_min_datetime": "2021-03-24T17:20:06.000Z",
"max_forks_repo_head_hexsha": "c2da8c1e9ecdc42c59b9de73224b3d50ee1c9786",
"max_forks_repo_licenses": [
"Apache-2.0"
],
"max_forks_repo_name": "dmcnamee/snc",
"max_forks_repo_path": "src/meio/gsm/utils.py",
"max_issues_count": 3,
"max_issues_repo_head_hexsha": "c2da8c1e9ecdc42c59b9de73224b3d50ee1c9786",
"max_issues_repo_issues_event_max_datetime": "2021-05-08T22:06:47.000Z",
"max_issues_repo_issues_event_min_datetime": "2021-03-26T01:16:08.000Z",
"max_issues_repo_licenses": [
"Apache-2.0"
],
"max_issues_repo_name": "dmcnamee/snc",
"max_issues_repo_path": "src/meio/gsm/utils.py",
"max_line_length": 100,
"max_stars_count": 5,
"max_stars_repo_head_hexsha": "c2da8c1e9ecdc42c59b9de73224b3d50ee1c9786",
"max_stars_repo_licenses": [
"Apache-2.0"
],
"max_stars_repo_name": "dmcnamee/snc",
"max_stars_repo_path": "src/meio/gsm/utils.py",
"max_stars_repo_stars_event_max_datetime": "2021-11-17T12:44:51.000Z",
"max_stars_repo_stars_event_min_datetime": "2021-03-24T16:23:10.000Z",
"num_tokens": 1159,
"path": null,
"reason": "import numpy",
"repo": null,
"save_path": null,
"sha": null,
"size": 4728
}
|
[STATEMENT]
lemma eventually_nhds_top:
fixes P :: "'a :: {order_top,linorder_topology} \<Rightarrow> bool"
and b :: 'a
assumes "b < top"
shows "eventually P (nhds top) \<longleftrightarrow> (\<exists>b<top. (\<forall>z. b < z \<longrightarrow> P z))"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. eventually P (nhds top) = (\<exists>b<top. \<forall>z>b. P z)
[PROOF STEP]
unfolding eventually_nhds
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. (\<exists>S. open S \<and> top \<in> S \<and> Ball S P) = (\<exists>b<top. \<forall>z>b. P z)
[PROOF STEP]
proof safe
[PROOF STATE]
proof (state)
goal (2 subgoals):
1. \<And>S. \<lbrakk>open S; top \<in> S; Ball S P\<rbrakk> \<Longrightarrow> \<exists>b<top. \<forall>z>b. P z
2. \<And>b. \<lbrakk>b < top; \<forall>z>b. P z\<rbrakk> \<Longrightarrow> \<exists>S. open S \<and> top \<in> S \<and> Ball S P
[PROOF STEP]
fix S :: "'a set"
[PROOF STATE]
proof (state)
goal (2 subgoals):
1. \<And>S. \<lbrakk>open S; top \<in> S; Ball S P\<rbrakk> \<Longrightarrow> \<exists>b<top. \<forall>z>b. P z
2. \<And>b. \<lbrakk>b < top; \<forall>z>b. P z\<rbrakk> \<Longrightarrow> \<exists>S. open S \<and> top \<in> S \<and> Ball S P
[PROOF STEP]
assume "open S" "top \<in> S"
[PROOF STATE]
proof (state)
this:
open S
top \<in> S
goal (2 subgoals):
1. \<And>S. \<lbrakk>open S; top \<in> S; Ball S P\<rbrakk> \<Longrightarrow> \<exists>b<top. \<forall>z>b. P z
2. \<And>b. \<lbrakk>b < top; \<forall>z>b. P z\<rbrakk> \<Longrightarrow> \<exists>S. open S \<and> top \<in> S \<and> Ball S P
[PROOF STEP]
note open_left[OF this \<open>b < top\<close>]
[PROOF STATE]
proof (state)
this:
\<exists>b<top. {b<..top} \<subseteq> S
goal (2 subgoals):
1. \<And>S. \<lbrakk>open S; top \<in> S; Ball S P\<rbrakk> \<Longrightarrow> \<exists>b<top. \<forall>z>b. P z
2. \<And>b. \<lbrakk>b < top; \<forall>z>b. P z\<rbrakk> \<Longrightarrow> \<exists>S. open S \<and> top \<in> S \<and> Ball S P
[PROOF STEP]
moreover
[PROOF STATE]
proof (state)
this:
\<exists>b<top. {b<..top} \<subseteq> S
goal (2 subgoals):
1. \<And>S. \<lbrakk>open S; top \<in> S; Ball S P\<rbrakk> \<Longrightarrow> \<exists>b<top. \<forall>z>b. P z
2. \<And>b. \<lbrakk>b < top; \<forall>z>b. P z\<rbrakk> \<Longrightarrow> \<exists>S. open S \<and> top \<in> S \<and> Ball S P
[PROOF STEP]
assume "\<forall>s\<in>S. P s"
[PROOF STATE]
proof (state)
this:
\<forall>s\<in>S. P s
goal (2 subgoals):
1. \<And>S. \<lbrakk>open S; top \<in> S; Ball S P\<rbrakk> \<Longrightarrow> \<exists>b<top. \<forall>z>b. P z
2. \<And>b. \<lbrakk>b < top; \<forall>z>b. P z\<rbrakk> \<Longrightarrow> \<exists>S. open S \<and> top \<in> S \<and> Ball S P
[PROOF STEP]
ultimately
[PROOF STATE]
proof (chain)
picking this:
\<exists>b<top. {b<..top} \<subseteq> S
\<forall>s\<in>S. P s
[PROOF STEP]
show "\<exists>b<top. \<forall>z>b. P z"
[PROOF STATE]
proof (prove)
using this:
\<exists>b<top. {b<..top} \<subseteq> S
\<forall>s\<in>S. P s
goal (1 subgoal):
1. \<exists>b<top. \<forall>z>b. P z
[PROOF STEP]
by (auto simp: subset_eq Ball_def)
[PROOF STATE]
proof (state)
this:
\<exists>b<top. \<forall>z>b. P z
goal (1 subgoal):
1. \<And>b. \<lbrakk>b < top; \<forall>z>b. P z\<rbrakk> \<Longrightarrow> \<exists>S. open S \<and> top \<in> S \<and> Ball S P
[PROOF STEP]
next
[PROOF STATE]
proof (state)
goal (1 subgoal):
1. \<And>b. \<lbrakk>b < top; \<forall>z>b. P z\<rbrakk> \<Longrightarrow> \<exists>S. open S \<and> top \<in> S \<and> Ball S P
[PROOF STEP]
fix b
[PROOF STATE]
proof (state)
goal (1 subgoal):
1. \<And>b. \<lbrakk>b < top; \<forall>z>b. P z\<rbrakk> \<Longrightarrow> \<exists>S. open S \<and> top \<in> S \<and> Ball S P
[PROOF STEP]
assume "b < top" "\<forall>z>b. P z"
[PROOF STATE]
proof (state)
this:
b < top
\<forall>z>b. P z
goal (1 subgoal):
1. \<And>b. \<lbrakk>b < top; \<forall>z>b. P z\<rbrakk> \<Longrightarrow> \<exists>S. open S \<and> top \<in> S \<and> Ball S P
[PROOF STEP]
then
[PROOF STATE]
proof (chain)
picking this:
b < top
\<forall>z>b. P z
[PROOF STEP]
show "\<exists>S. open S \<and> top \<in> S \<and> (\<forall>xa\<in>S. P xa)"
[PROOF STATE]
proof (prove)
using this:
b < top
\<forall>z>b. P z
goal (1 subgoal):
1. \<exists>S. open S \<and> top \<in> S \<and> (\<forall>xa\<in>S. P xa)
[PROOF STEP]
by (intro exI[of _ "{b <..}"]) auto
[PROOF STATE]
proof (state)
this:
\<exists>S. open S \<and> top \<in> S \<and> (\<forall>xa\<in>S. P xa)
goal:
No subgoals!
[PROOF STEP]
qed
|
{
"alphanum_fraction": null,
"author": null,
"avg_line_length": null,
"converted": null,
"ext": null,
"file": null,
"hexsha": null,
"include": null,
"lang": null,
"length": 17,
"llama_tokens": 1977,
"mathlib_filename": null,
"max_forks_count": null,
"max_forks_repo_forks_event_max_datetime": null,
"max_forks_repo_forks_event_min_datetime": null,
"max_forks_repo_head_hexsha": null,
"max_forks_repo_licenses": null,
"max_forks_repo_name": null,
"max_forks_repo_path": null,
"max_issues_count": null,
"max_issues_repo_head_hexsha": null,
"max_issues_repo_issues_event_max_datetime": null,
"max_issues_repo_issues_event_min_datetime": null,
"max_issues_repo_licenses": null,
"max_issues_repo_name": null,
"max_issues_repo_path": null,
"max_line_length": null,
"max_stars_count": null,
"max_stars_repo_head_hexsha": null,
"max_stars_repo_licenses": null,
"max_stars_repo_name": null,
"max_stars_repo_path": null,
"max_stars_repo_stars_event_max_datetime": null,
"max_stars_repo_stars_event_min_datetime": null,
"num_tokens": null,
"path": null,
"reason": null,
"repo": null,
"save_path": null,
"sha": null,
"size": null
}
|
import numpy as np
import pytest
from sklearn.utils._readonly_array_wrapper import ReadonlyArrayWrapper, _test_sum
from sklearn.utils._testing import create_memmap_backed_data
def _readonly_array_copy(x):
"""Return a copy of x with flag writeable set to False."""
y = x.copy()
y.flags["WRITEABLE"] = False
return y
def _create_memmap_backed_data(data):
return create_memmap_backed_data(
data, mmap_mode="r", return_folder=False, aligned=True
)
@pytest.mark.parametrize("readonly", [_readonly_array_copy, _create_memmap_backed_data])
@pytest.mark.parametrize("dtype", [np.float32, np.float64, np.int32, np.int64])
def test_readonly_array_wrapper(readonly, dtype):
"""Test that ReadonlyWrapper allows working with fused-typed."""
x = np.arange(10).astype(dtype)
sum_origin = _test_sum(x)
# ReadonlyArrayWrapper works with writable buffers
sum_writable = _test_sum(ReadonlyArrayWrapper(x))
assert sum_writable == pytest.approx(sum_origin, rel=1e-11)
# Now, check on readonly buffers
x_readonly = readonly(x)
with pytest.raises(ValueError, match="buffer source array is read-only"):
_test_sum(x_readonly)
x_readonly = ReadonlyArrayWrapper(x_readonly)
sum_readonly = _test_sum(x_readonly)
assert sum_readonly == pytest.approx(sum_origin, rel=1e-11)
|
{
"alphanum_fraction": 0.7447916667,
"author": null,
"avg_line_length": 32,
"converted": null,
"ext": "py",
"file": null,
"hexsha": "38163cc2461ce878eb638401b74bea21507f701f",
"include": true,
"lang": "Python",
"length": null,
"llama_tokens": null,
"mathlib_filename": null,
"max_forks_count": 26886,
"max_forks_repo_forks_event_max_datetime": "2022-03-31T18:03:23.000Z",
"max_forks_repo_forks_event_min_datetime": "2015-01-01T00:59:27.000Z",
"max_forks_repo_head_hexsha": "92bc7fbe1040f49e820473e33cc3902a5a7177c7",
"max_forks_repo_licenses": [
"MIT"
],
"max_forks_repo_name": "13rianlucero/CrabAgePrediction",
"max_forks_repo_path": "crabageprediction/venv/Lib/site-packages/sklearn/utils/tests/test_readonly_wrapper.py",
"max_issues_count": 17065,
"max_issues_repo_head_hexsha": "92bc7fbe1040f49e820473e33cc3902a5a7177c7",
"max_issues_repo_issues_event_max_datetime": "2022-03-31T23:48:34.000Z",
"max_issues_repo_issues_event_min_datetime": "2015-01-01T02:01:58.000Z",
"max_issues_repo_licenses": [
"MIT"
],
"max_issues_repo_name": "13rianlucero/CrabAgePrediction",
"max_issues_repo_path": "crabageprediction/venv/Lib/site-packages/sklearn/utils/tests/test_readonly_wrapper.py",
"max_line_length": 88,
"max_stars_count": 50961,
"max_stars_repo_head_hexsha": "92bc7fbe1040f49e820473e33cc3902a5a7177c7",
"max_stars_repo_licenses": [
"MIT"
],
"max_stars_repo_name": "13rianlucero/CrabAgePrediction",
"max_stars_repo_path": "crabageprediction/venv/Lib/site-packages/sklearn/utils/tests/test_readonly_wrapper.py",
"max_stars_repo_stars_event_max_datetime": "2022-03-31T23:40:12.000Z",
"max_stars_repo_stars_event_min_datetime": "2015-01-01T06:06:31.000Z",
"num_tokens": 328,
"path": null,
"reason": "import numpy",
"repo": null,
"save_path": null,
"sha": null,
"size": 1344
}
|
module Variables
include("./constants.jl")
using JuMP
using .Constants
export init_variables
function init_variables(m)
#Dimensions of box
@variable(m, lob_length_of_box[i=1:length(boxes)] == boxes[i][1])
@variable(m, wob_width_of_box[i=1:length(boxes)] == boxes[i][2])
@variable(m, hob_height_of_box[i=1:length(boxes)] == boxes[i][3])
#Front-left coordinate of each box
@variable(m, flbx_front_left_bot_x[i=1:length(boxes)] >= 0)
@variable(m, flby_front_left_bot_y[i=1:length(boxes)] >= 0)
@variable(m, flbz_front_left_bot_z[i=1:length(boxes)] >= 0)
#Check if length (pi) is parallel to X, Y or Z-axis
@variable(m, lpx_length_of_box_parallel_to_x[i=1:length(boxes)], Bin)
@variable(m, lpy_length_of_box_parallel_to_y[i=1:length(boxes)], Bin)
@variable(m, lpz_length_of_box_parallel_to_z[i=1:length(boxes)], Bin)
#Check if width (qi) is parallel to X, Y or Z-axis
@variable(m, wpx_width_of_box_parallel_to_x[i=1:length(boxes)], Bin)
@variable(m, wpy_width_of_box_parallel_to_y[i=1:length(boxes)], Bin)
@variable(m, wpz_width_of_box_parallel_to_z[i=1:length(boxes)], Bin)
#Check if height (ri) is parallel to X, Y or Z-axis
@variable(m, hpx_height_of_box_parallel_to_x[i=1:length(boxes)], Bin)
@variable(m, hpy_height_of_box_parallel_to_y[i=1:length(boxes)], Bin)
@variable(m, hpz_height_of_box_parallel_to_z[i=1:length(boxes)], Bin)
# box_j_x_axis_cover = lpx_length_of_box_parallel_to_x[j]*lob_length_of_box[j] + wpx_width_of_box_parallel_to_x[j]*wob_width_of_box[j]
# box_j_y_axis_cover = lpy_length_of_box_parallel_to_y[j]*lob_length_of_box[j] + wpy_width_of_box_parallel_to_y[j]*wob_width_of_box[j]
@expressions(m, begin
box_x_axis_cover[i=1:length(boxes)],
(
m[:lpx_length_of_box_parallel_to_x][i]*fix_value(m[:lob_length_of_box][i]) +
m[:wpx_width_of_box_parallel_to_x][i]*fix_value(m[:wob_width_of_box][i])
)
box_y_axis_cover[i=1:length(boxes)],
(
m[:lpy_length_of_box_parallel_to_y][i]*fix_value(m[:lob_length_of_box][i]) +
m[:wpy_width_of_box_parallel_to_y][i]*fix_value(m[:wob_width_of_box][i])
)
end)
#Make objective available for the whole program. Later used in objective function.
@expression(
m, heights[i=1:length(boxes)], m[:flbz_front_left_bot_z][i] + fix_value(m[:hob_height_of_box][i])
)
@variable(m, H >= maxHeight >= 0)
@constraint(m, m[:maxHeight] .>= m[:heights])
return m
end
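# Usage sketch (hypothetical; assumes `Constants` provides `boxes` and `H`, and
# that a MILP solver such as HiGHS is available):
#   using JuMP, HiGHS
#   m = Model(HiGHS.Optimizer)
#   m = init_variables(m)
#   @objective(m, Min, m[:maxHeight])  # minimise the overall packing height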
end
|
{
"alphanum_fraction": 0.7031188314,
"author": null,
"avg_line_length": 40.8548387097,
"converted": null,
"ext": "jl",
"file": null,
"hexsha": "132ea0ed52d08ad51cb24c10b5cd5b39232a5ed8",
"include": null,
"lang": "Julia",
"length": null,
"llama_tokens": null,
"mathlib_filename": null,
"max_forks_count": null,
"max_forks_repo_forks_event_max_datetime": null,
"max_forks_repo_forks_event_min_datetime": null,
"max_forks_repo_head_hexsha": "5d4a51598f1677c2f5c219a88ca9ab4c9b6a5c6f",
"max_forks_repo_licenses": [
"MIT"
],
"max_forks_repo_name": "ToralfFrich/Master_Thesis",
"max_forks_repo_path": "src/constraints/geometric/variables.jl",
"max_issues_count": null,
"max_issues_repo_head_hexsha": "5d4a51598f1677c2f5c219a88ca9ab4c9b6a5c6f",
"max_issues_repo_issues_event_max_datetime": null,
"max_issues_repo_issues_event_min_datetime": null,
"max_issues_repo_licenses": [
"MIT"
],
"max_issues_repo_name": "ToralfFrich/Master_Thesis",
"max_issues_repo_path": "src/constraints/geometric/variables.jl",
"max_line_length": 138,
"max_stars_count": null,
"max_stars_repo_head_hexsha": "5d4a51598f1677c2f5c219a88ca9ab4c9b6a5c6f",
"max_stars_repo_licenses": [
"MIT"
],
"max_stars_repo_name": "ToralfFrich/Master_Thesis",
"max_stars_repo_path": "src/constraints/geometric/variables.jl",
"max_stars_repo_stars_event_max_datetime": null,
"max_stars_repo_stars_event_min_datetime": null,
"num_tokens": 755,
"path": null,
"reason": null,
"repo": null,
"save_path": null,
"sha": null,
"size": 2533
}
|
"""
Load the question JSON and attach the matching captions.
"""
import json
import pandas as pd
path = './datasets/vqa/v2_OpenEnded_mscoco_train2014_questions.json'
with open(path) as question:
question = json.load(question)
# question['questions'][0]
# question['questions'][1]
# question['questions'][2]
df = pd.DataFrame(question['questions'])
df
caption_path = './datasets/caption/vis_st_trainval.json'
with open(caption_path) as cap:
cap = json.load(cap)
df_cap = pd.DataFrame(cap)
df_cap
df_addcap = pd.merge(df, df_cap, how='left', on='image_id')
del df_addcap['file_path']
########################################################################################################################
"""
pandas to json
"""
df_addcap.to_json('./datasets/caption/train_cap2.json', orient='table')
with open('./datasets/caption/train_cap2.json') as train_cap:
train_cap = json.load(train_cap)
########################################################################################################################
########################################################################################################################
"""
answer + cap
"""
path = '/home/nextgen/Desktop/mcan-vqa/datasets/vqa/v2_mscoco_train2014_annotations.json'
path = './datasets/vqa/v2_mscoco_val2014_annotations.json'
with open(path) as answer:
answer = json.load(answer)
answer['annotations'][0]
df_ans = pd.DataFrame(answer['annotations'])
df_ans[:0]
del df_ans['question_type']
del df_ans['answers']
del df_ans['answer_type']
del df_ans['image_id']
df_ans[df_ans['question_id']==458752000]
df_addcap2 = pd.merge(df_addcap, df_ans, how='left', on='question_id')
df_addcap2[:0]
df_addcap2['multiple_choice_answer']
# del df_addcap['file_path']
df_addcap2.to_json('./datasets/caption/val_qacap.json', orient='table')
with open('./datasets/caption/train_qacap.json') as train_qacap:
train_qacap = json.load(train_qacap)
########################################################################################################################
"""val test도 마찬가지"""
path = './datasets/vqa/v2_OpenEnded_mscoco_val2014_questions.json'
with open(path) as question:
question = json.load(question)
df = pd.DataFrame(question['questions'])
df
caption_path = './datasets/caption/vis_st_trainval.json'
with open(caption_path) as cap:
cap = json.load(cap)
df_cap = pd.DataFrame(cap)
df_cap
df_addcap = pd.merge(df, df_cap, how='left', on='image_id')
df_addcap[:0]
del df_addcap['file_path']
df_addcap.to_json('./datasets/caption/val_cap.json', orient='table')
#test
path = './datasets/vqa/v2_OpenEnded_mscoco_test-dev2015_questions.json'
with open(path) as question:
question = json.load(question)
df = pd.DataFrame(question['questions'])
df
df['image_id'] = df.image_id.astype(int)
caption_path = './datasets/caption/vis_st_test.json'
with open(caption_path) as cap:
cap = json.load(cap)
df_cap = pd.DataFrame(cap)
df_cap
df_cap['image_id'] = df_cap.image_id.astype(int)
df_addcap = pd.merge(df, df_cap, how='left', on='image_id')
df_addcap[:0]
del df_addcap['file_path']
df_addcap.to_json('./datasets/caption/test_cap.json', orient='table')
########################################################################################################################
from core.data.ans_punct import prep_ans
import numpy as np
import en_vectors_web_lg, random, re, json
import json
from core.data.data_utils import ques_load
stat_ques_list = \
json.load(open('./datasets/caption/train_cap.json', 'r'))['data'] + \
json.load(open('./datasets/caption/val_cap.json', 'r'))['data'] + \
json.load(open('./datasets/caption/test_cap.json', 'r'))['data']
def tokenize(stat_ques_list, use_glove):
token_to_ix = {
'PAD': 0,
'UNK': 1,
}
spacy_tool = None
pretrained_emb = []
if use_glove:
spacy_tool = en_vectors_web_lg.load()
pretrained_emb.append(spacy_tool('PAD').vector)
pretrained_emb.append(spacy_tool('UNK').vector)
for ques in stat_ques_list:
words = re.sub(
r"([.,'!?\"()*#:;])",
'',
ques['question'].lower()
).replace('-', ' ').replace('/', ' ').split()
for word in words:
if word not in token_to_ix:
token_to_ix[word] = len(token_to_ix)
if use_glove:
pretrained_emb.append(spacy_tool(word).vector)
for ques in stat_ques_list:
words = re.sub(
r"([.,'!?\"()*#:;])",
'',
ques['caption'].lower()
).replace('-', ' ').replace('/', ' ').split()
for word in words:
if word not in token_to_ix:
token_to_ix[word] = len(token_to_ix)
if use_glove:
pretrained_emb.append(spacy_tool(word).vector)
pretrained_emb = np.array(pretrained_emb)
return token_to_ix, pretrained_emb
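# `token_to_ix` maps every word seen in a question or caption to a row index, and
# `pretrained_emb` stacks the matching GloVe vectors (en_vectors_web_lg vectors
# are 300-d), with rows 0 and 1 reserved for 'PAD' and 'UNK'.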
token_to_ix, pretrained_emb = tokenize(stat_ques_list, True)
#######################################################################################################################
# with open('./datasets/vqa/v2_mscoco_train2014_annotations.json') as answer:
# answer = json.load(answer)
#
# answer['annotations'][2]
"""
답을 이용하는거로 하면 train val 비교로해야 함
test셋은 답을 제공하지 않아서 test할 때 답을 이용하는 모델을 사용할 수 없음
"""
####
import cal_sim
import pandas as pd
with open('datasets/caption/train_cap.json') as train_cap:
train_cap = json.load(train_cap)
with open('datasets/caption/val_cap.json') as val_cap:
val_cap = json.load(val_cap)
with open('datasets/caption/test_cap.json') as test_cap:
test_cap = json.load(test_cap)
df_train = pd.DataFrame(train_cap['data'])
df_val = pd.DataFrame(val_cap['data'])
df_test = pd.DataFrame(test_cap['data'])
df_train[:0]
# df_train['similarity'] = cal_sim.sent_sim((df_train['question'], dtype=int32), (df_train['caption'], dtype=int32))
df_train.iloc[0]['question']
def txt2vec(sentence):
    """Map a sentence to the list of GloVe vectors of its words."""
    tt = []
    words = re.sub(
        r"([.,'!?\"()*#:;])",
        '',
        sentence.lower()
    ).replace('-', ' ').replace('/', ' ').split()
    for word in words:
        num = token_to_ix.get(word, token_to_ix['UNK'])  # fall back to UNK for unseen words
        tt.append(pretrained_emb[num])
    return tt
stat_ques_list[0]
token_to_ix['what']
len(txt2vec(df_train.iloc[0]['question']))
df_train.iloc[0]['question']
df_train.iloc[0]['caption']
len(txt2vec(df_train.iloc[0]['caption']))
from numpy import dot
from numpy.linalg import norm
import numpy as np
def cos_sim(A, B):
return dot(A, np.transpose(B)) / (norm(A) * norm(B))
def word_sim(w1, w2):  # word similarity, cosine rescaled from [-1, 1] to [0, 1]
    s = 0.5 * (1 + cos_sim(w1, w2))
    return s
def sent_sim(ss1, ss2):  # sentence similarity
    s1 = txt2vec(ss1)  # question word vectors
    s2 = txt2vec(ss2)  # caption word vectors
    # txt2vec returns one vector per word (no PAD/UNK entries), so use all words
    t = []
    for i in s1:
        tmp = []
        for j in s2:
            tmp_sim = word_sim(i, j)
            tmp.append(tmp_sim)
        t.append(max(tmp))
    sentence_sim = sum(t) / len(s1)
    return sentence_sim
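# sent_sim scores each question word by its best caption match and averages:
#   sim(Q, C) = (1/|Q|) * sum_{w in Q} max_{v in C} 0.5 * (1 + cos(w, v))
# Since 0.5 * (1 + cos) stays near 0.5 even for unrelated word pairs, the scores
# are pushed towards the high end, which matches the observation further below.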
t = sent_sim('yes', 'hello')
tmp = sent_sim(df_train.iloc[105]['question'], df_train.iloc[103]['caption'])
t1 = sent_sim('Is there a travel guide on the table?', 'A place of cake and coffee are on an outdoor table')
t2 = sent_sim('yes', 'A place of cake and coffee are on an outdoor table')
t3 = sent_sim('no', 'no')
df_train.iloc[105]['question'] # the similarity looks a bit off; the scores come out too high
df_train.iloc[103]['caption']
cos_sim(txt2vec('e'), txt2vec('z'))
new_i = re.sub(
r"([.,'!?\"()*#:;])",
'',
df_train.iloc[102]['question'].lower()
).replace('-', ' ').replace('/', ' ').split()
np.dot(txt2vec(df_train.iloc[103]['question']), np.transpose(txt2vec(df_train.iloc[103]['caption'])))  # raw dot-product matrix between question and caption word vectors
|
{
"alphanum_fraction": 0.5889103804,
"author": null,
"avg_line_length": 26.7413793103,
"converted": null,
"ext": "py",
"file": null,
"hexsha": "8165627d6470d7c3af8974c3cff4f40b6476f57e",
"include": true,
"lang": "Python",
"length": null,
"llama_tokens": null,
"mathlib_filename": null,
"max_forks_count": null,
"max_forks_repo_forks_event_max_datetime": null,
"max_forks_repo_forks_event_min_datetime": null,
"max_forks_repo_head_hexsha": "788e21fc1bc712018166aa44cc3298264f493f3b",
"max_forks_repo_licenses": [
"Apache-2.0"
],
"max_forks_repo_name": "JoonseoKang/mcan-cap",
"max_forks_repo_path": "json_pandas.py",
"max_issues_count": null,
"max_issues_repo_head_hexsha": "788e21fc1bc712018166aa44cc3298264f493f3b",
"max_issues_repo_issues_event_max_datetime": null,
"max_issues_repo_issues_event_min_datetime": null,
"max_issues_repo_licenses": [
"Apache-2.0"
],
"max_issues_repo_name": "JoonseoKang/mcan-cap",
"max_issues_repo_path": "json_pandas.py",
"max_line_length": 120,
"max_stars_count": null,
"max_stars_repo_head_hexsha": "788e21fc1bc712018166aa44cc3298264f493f3b",
"max_stars_repo_licenses": [
"Apache-2.0"
],
"max_stars_repo_name": "JoonseoKang/mcan-cap",
"max_stars_repo_path": "json_pandas.py",
"max_stars_repo_stars_event_max_datetime": null,
"max_stars_repo_stars_event_min_datetime": null,
"num_tokens": 1967,
"path": null,
"reason": "import numpy,from numpy",
"repo": null,
"save_path": null,
"sha": null,
"size": 7755
}
|
# script to convert the newly generated Relative Humidity
def convert_to_hur( tas_arr, vap_arr ):
import numpy as np
with np.errstate( over='ignore' ):
esa_arr = 6.112 * np.exp( 17.62 * tas_arr/ (243.12 + tas_arr) )
# esa_arr = 6.112 * np.exp( 22.46 * tas_arr / (272.62 + tas_arr) )
return vap_arr/esa_arr * 100
def convert_to_vap( tas_arr, hur_arr ):
import numpy as np
with np.errstate( over='ignore' ):
esa_arr = 6.112 * np.exp( 17.62 * tas_arr / (243.12 + tas_arr) )
# esa_arr = 6.112 * np.exp( 22.46*tas_arr / (272.62 + tas_arr) )
return (hur_arr * esa_arr) / 100
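# editor's sketch (not part of the original script): both converters share the
# Magnus-type saturation vapour pressure formula
#   esa = 6.112 * exp( 17.62*T / (243.12 + T) )   [T in deg C, esa in hPa]
# so convert_to_vap and convert_to_hur invert one another. A quick scalar
# round-trip check:
def _sanity_check():
    # at 20 C: esa ~= 23.3 hPa, so 50% relative humidity <-> vap ~= 11.7 hPa
    vap = convert_to_vap( 20.0, 50.0 )
    hur = convert_to_hur( 20.0, vap )
    assert abs( hur - 50.0 ) < 1e-6
    return vap, hur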
def run( x ):
tas = rasterio.open( x[0] )
hur = rasterio.open( x[1] )
meta = tas.meta
meta[ 'dtype' ] = 'float32' # set it to float32
meta.update( compress='lzw' )
meta.pop( 'transform' )
tas_arr = tas.read( 1 )
hur_arr = hur.read( 1 )
vap_arr = convert_to_vap( tas_arr, hur_arr )
# mask it:
mask = tas.read_masks( 1 )
vap_arr[ mask == 0 ] = tas.nodata
# build an output filename from the input tas and write out -- changed to deal with pathing!
output_filename = x[1].replace( 'hur', 'vap' )
output_filename = output_filename.replace( '_metric_', '_hPa_' )
# output_filename = x[0].replace( 'tas', 'vap' )
# output_filename = output_filename.replace( '_C_', '_hPa_' )
dirname = os.path.dirname( output_filename )
    try:
        if not os.path.exists( dirname ):
            os.makedirs( dirname )
    except OSError: # the directory may already exist or be created concurrently by another worker
        pass
with rasterio.open( output_filename, 'w', **meta ) as out:
out.write( vap_arr.astype( np.float32 ), 1 )
return output_filename
if __name__ == '__main__':
# import modules
import os, glob, rasterio
import numpy as np
from pathos import multiprocessing as mp
# args
ncores = 40
tas_input_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_november_final/ar5'
hur_input_path = '/Data/malindgren/cru_november_final/ar5'
models = [ 'IPSL-CM5A-LR', 'GISS-E2-R', 'MRI-CGCM3', 'CCSM4', 'GFDL-CM3' ]
for model in models:
# print model
tas_files = sorted( glob.glob( os.path.join( tas_input_path, model, 'tas', 'downscaled', '*.tif' ) ) )
hur_files = sorted( glob.glob( os.path.join( hur_input_path, model, 'hur', 'downscaled', '*.tif' ) ) )
# combine the sorted lists which should now be in a common order...
tas_hur_list = zip( tas_files, hur_files )
# run in parallel
pool = mp.Pool( processes=ncores )
out = pool.map( run, tas_hur_list )
pool.close()
# def return_files( input_path, var ):
# output_files = []
# for root, subs, files in os.walk( input_path ):
# # # print root
# if root.endswith( 'downscaled' ) and len( files ) != 0 and var in root:
# pool = mp.Pool( processes=ncores )
# files = pool.map( lambda x: os.path.join( root, x ), files )
# pool.close()
# output_files.append( files )
# return output_files
|
{
"alphanum_fraction": 0.6729470316,
"author": null,
"avg_line_length": 32.7093023256,
"converted": null,
"ext": "py",
"file": null,
"hexsha": "80c61aacc21822c977f0758b8a90ad8a8e99498c",
"include": true,
"lang": "Python",
"length": null,
"llama_tokens": null,
"mathlib_filename": null,
"max_forks_count": 3,
"max_forks_repo_forks_event_max_datetime": "2021-05-25T03:46:00.000Z",
"max_forks_repo_forks_event_min_datetime": "2020-09-16T04:48:57.000Z",
"max_forks_repo_head_hexsha": "3fe8ea1774cf82149d19561ce5f19b25e6cba6fb",
"max_forks_repo_licenses": [
"MIT"
],
"max_forks_repo_name": "ua-snap/downscale",
"max_forks_repo_path": "snap_scripts/old_scripts/tem_iem_older_scripts_april2018/tem_inputs_iem/old_code/convert_tas_hur_to_vap.py",
"max_issues_count": 17,
"max_issues_repo_head_hexsha": "3fe8ea1774cf82149d19561ce5f19b25e6cba6fb",
"max_issues_repo_issues_event_max_datetime": "2017-04-17T20:57:02.000Z",
"max_issues_repo_issues_event_min_datetime": "2016-01-04T23:37:47.000Z",
"max_issues_repo_licenses": [
"MIT"
],
"max_issues_repo_name": "ua-snap/downscale",
"max_issues_repo_path": "snap_scripts/old_scripts/tem_iem_older_scripts_april2018/tem_inputs_iem/old_code/convert_tas_hur_to_vap.py",
"max_line_length": 112,
"max_stars_count": 5,
"max_stars_repo_head_hexsha": "3fe8ea1774cf82149d19561ce5f19b25e6cba6fb",
"max_stars_repo_licenses": [
"MIT"
],
"max_stars_repo_name": "ua-snap/downscale",
"max_stars_repo_path": "snap_scripts/old_scripts/tem_iem_older_scripts_april2018/tem_inputs_iem/old_code/convert_tas_hur_to_vap.py",
"max_stars_repo_stars_event_max_datetime": "2022-03-23T16:32:54.000Z",
"max_stars_repo_stars_event_min_datetime": "2020-06-24T21:55:12.000Z",
"num_tokens": 898,
"path": null,
"reason": "import numpy",
"repo": null,
"save_path": null,
"sha": null,
"size": 2813
}
|
[STATEMENT]
lemma closed_Union [continuous_intros, intro]: "finite S \<Longrightarrow> \<forall>T\<in>S. closed T \<Longrightarrow> closed (\<Union>S)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>finite S; \<forall>T\<in>S. closed T\<rbrakk> \<Longrightarrow> closed (\<Union> S)
[PROOF STEP]
by (induct set: finite) auto
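For comparison, a Lean 3 / mathlib sketch of the same fact (assuming mathlib's
is_closed_sUnion, which carries the analogous statement):
import topology.basic

-- sketch: finite unions of closed sets are closed, via mathlib's lemma
example {α : Type*} [topological_space α] {S : set (set α)}
  (hfin : S.finite) (hcl : ∀ T ∈ S, is_closed T) :
  is_closed (⋃₀ S) :=
is_closed_sUnion hfin hcl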
|
{
"alphanum_fraction": null,
"author": null,
"avg_line_length": null,
"converted": null,
"ext": null,
"file": null,
"hexsha": null,
"include": null,
"lang": null,
"length": 1,
"llama_tokens": 119,
"mathlib_filename": null,
"max_forks_count": null,
"max_forks_repo_forks_event_max_datetime": null,
"max_forks_repo_forks_event_min_datetime": null,
"max_forks_repo_head_hexsha": null,
"max_forks_repo_licenses": null,
"max_forks_repo_name": null,
"max_forks_repo_path": null,
"max_issues_count": null,
"max_issues_repo_head_hexsha": null,
"max_issues_repo_issues_event_max_datetime": null,
"max_issues_repo_issues_event_min_datetime": null,
"max_issues_repo_licenses": null,
"max_issues_repo_name": null,
"max_issues_repo_path": null,
"max_line_length": null,
"max_stars_count": null,
"max_stars_repo_head_hexsha": null,
"max_stars_repo_licenses": null,
"max_stars_repo_name": null,
"max_stars_repo_path": null,
"max_stars_repo_stars_event_max_datetime": null,
"max_stars_repo_stars_event_min_datetime": null,
"num_tokens": null,
"path": null,
"reason": null,
"repo": null,
"save_path": null,
"sha": null,
"size": null
}
|
subroutine qqb_ttw_v(p,msqv)
************************************************************************
* Author: R. K. Ellis *
* March, 2012. *
* *
* Calculate the virtual matrix element squared for the process *
* *
* q(-p1) +qbar(-p2)= *
* +t(nu(p3)+e+(p4)+b(p5)) *
* +t~(b~(p6)+e^-(p7)+nu~(p8)) *
* +nu(p9) + e^+(p10) *
* *
* *
************************************************************************
implicit none
include 'constants.f'
include 'ckm.f'
include 'masses.f'
include 'sprods_com.f'
include 'zprods_com.f'
include 'qcdcouple.f'
include 'ewcouple.f'
include 'momwbbm.f'
include 'scheme.f'
include 'plabel.f'
integer j,k,nu,j1,j2,hb,hc
double precision p(mxpart,4),q(mxpart,4),msqv(-nf:nf,-nf:nf)
double precision qqb,qbq,dot
double precision fac,mQsq,s56,betasq
double complex a6treemm,a6treemp,a6treepm,a6treepp,
& mtop(2,2),manti(2,2),a61mm,a61mp,a61pm,a61pp,a61(2,2),a6(2,2),
& loqbq(2,2),hoqbq(2,2),loqqb(2,2),hoqqb(2,2)
logical numcheck
common/numcheck/numcheck
!$omp threadprivate(/numcheck/)
scheme='dred'
do j=-nf,nf
do k=-nf,nf
msqv(j,k)=0d0
enddo
enddo
c--- set the following flag to true to write out values of different primitives
c--- (a similar flag, to write out coefficients of the basis integrals,
c--- can be found in the routine a61mass)
numcheck=.false.
c--- setup for performing check against numerical evaluation
if (numcheck) then
c----- read in special point
include 'MCFMpoint.f'
c call writeout(p)
c pause
c--- perform the usual business to rotate away from the z-direction
do j=1,6
q(j,4)=p(j,4)
q(j,1)=p(j,3)
q(j,2)=-p(j,2)
q(j,3)=p(j,1)
do k=1,4
p(j,k)=q(j,k)
enddo
enddo
endif
do nu=1,4
q(1,nu)=p(1,nu)
q(2,nu)=p(2,nu)
q(3,nu)=p(9,nu)
q(4,nu)=p(10,nu)
q(5,nu)=p(3,nu)+p(4,nu)+p(5,nu)
q(6,nu)=p(6,nu)+p(7,nu)+p(8,nu)
enddo
mQsq=mt**2
c--- construct the massless momenta a la Rodrigo
do j=1,4
do nu=1,4
mom(j,nu)=q(j,nu)
enddo
enddo
s56=2d0*dot(q,5,6)+2d0*mQsq
betasq=1d0-4d0*mQsq/s56
if (betasq .ge. 0d0) then
bp=0.5d0*(1d0+dsqrt(betasq))
bm=1d0-bp
else
write(6,*) 'betasq < 0 in qqb_ttw_v.f, betasq=',betasq
call flush(6)
stop
endif
do nu=1,4
mom(5,nu)=(bp*q(5,nu)-bm*q(6,nu))/dsqrt(betasq)
mom(6,nu)=(bp*q(6,nu)-bm*q(5,nu))/dsqrt(betasq)
enddo
call tdecayrod(p,3,4,5,6,7,8,0,mtop)
call adecayrod(p,3,4,5,6,7,8,0,manti)
c--- compute spinor products
call spinoru(6,mom,za,zb)
c--- overall factor
fac=V*gsq**2*gwsq**6*aveqq/(mt*twidth)**4
fac=fac*xn*ason2pi
fac=fac*s(3,4)**2/((s(3,4)-wmass**2)**2+(wmass*wwidth)**2)
c--- include factor for hadronic decays of W
if (plabel(3) .eq. 'pp') fac=2d0*xn*fac
if (plabel(7) .eq. 'pp') fac=2d0*xn*fac
c--- QBQ: compute 1-loop and tree amplitudes
call a61mass(1,6,5,2,4,3,mQsq,a61mm,a61mp,a61pm,a61pp,
& a6treemm,a6treemp,a6treepm,a6treepp)
a61(1,1)=a61mm
a61(1,2)=a61mp
a61(2,1)=a61pm
a61(2,2)=a61pp
a6(1,1)=a6treemm
a6(1,2)=a6treemp
a6(2,1)=a6treepm
a6(2,2)=a6treepp
qbq=0d0
do hb=1,2
do hc=1,2
hoqbq(hb,hc)=czip
loqbq(hb,hc)=czip
do j1=1,2
do j2=1,2
loqbq(hb,hc)=loqbq(hb,hc)+mtop(hb,j1)*a6(j1,j2)*manti(j2,hc)
hoqbq(hb,hc)=hoqbq(hb,hc)+mtop(hb,j1)*a61(j1,j2)*manti(j2,hc)
enddo
enddo
qbq=qbq+fac*dble(loqbq(hb,hc)*dconjg(hoqbq(hb,hc)))
enddo
enddo
c--- put a pause here when writing out primitives
c if (numcheck) pause
c--- QQB: compute 1-loop and tree amplitudes
call a61mass(2,6,5,1,4,3,mQsq,a61mm,a61mp,a61pm,a61pp,
& a6treemm,a6treemp,a6treepm,a6treepp)
a61(1,1)=a61mm
a61(1,2)=a61mp
a61(2,1)=a61pm
a61(2,2)=a61pp
a6(1,1)=a6treemm
a6(1,2)=a6treemp
a6(2,1)=a6treepm
a6(2,2)=a6treepp
qqb=0d0
do hb=1,2
do hc=1,2
hoqqb(hb,hc)=czip
loqqb(hb,hc)=czip
do j1=1,2
do j2=1,2
loqqb(hb,hc)=loqqb(hb,hc)+
& mtop(hb,j1)*a6(j1,j2)*manti(j2,hc)
hoqqb(hb,hc)=hoqqb(hb,hc)+
& mtop(hb,j1)*a61(j1,j2)*manti(j2,hc)
enddo
enddo
qqb=qqb+fac*dble(loqqb(hb,hc)*dconjg(hoqqb(hb,hc)))
enddo
enddo
do j=-nf,nf
do k=-nf,nf
if ((j .gt. 0) .and. (k .lt. 0)) then
msqv(j,k)=Vsq(j,k)*qqb
elseif ((j .lt. 0) .and. (k .gt. 0)) then
msqv(j,k)=Vsq(j,k)*qbq
endif
enddo
enddo
return
end
|
{
"alphanum_fraction": 0.4778597786,
"author": null,
"avg_line_length": 29.2972972973,
"converted": null,
"ext": "f",
"file": null,
"hexsha": "c6115fb56877b523ecd6d242c190f5c189376213",
"include": null,
"lang": "FORTRAN",
"length": null,
"llama_tokens": null,
"mathlib_filename": null,
"max_forks_count": 19,
"max_forks_repo_forks_event_max_datetime": "2021-07-06T10:04:40.000Z",
"max_forks_repo_forks_event_min_datetime": "2015-05-04T22:15:41.000Z",
"max_forks_repo_head_hexsha": "80da31668d7b7eb5b02bb4cac435562c45075d24",
"max_forks_repo_licenses": [
"Apache-2.0"
],
"max_forks_repo_name": "tmartini/JHUGen",
"max_forks_repo_path": "MCFM-JHUGen/src/TopW/qqb_ttw_v.f",
"max_issues_count": 64,
"max_issues_repo_head_hexsha": "80da31668d7b7eb5b02bb4cac435562c45075d24",
"max_issues_repo_issues_event_max_datetime": "2022-01-25T04:59:32.000Z",
"max_issues_repo_issues_event_min_datetime": "2015-06-24T15:08:17.000Z",
"max_issues_repo_licenses": [
"Apache-2.0"
],
"max_issues_repo_name": "tmartini/JHUGen",
"max_issues_repo_path": "MCFM-JHUGen/src/TopW/qqb_ttw_v.f",
"max_line_length": 79,
"max_stars_count": 3,
"max_stars_repo_head_hexsha": "80da31668d7b7eb5b02bb4cac435562c45075d24",
"max_stars_repo_licenses": [
"Apache-2.0"
],
"max_stars_repo_name": "tmartini/JHUGen",
"max_stars_repo_path": "MCFM-JHUGen/src/TopW/qqb_ttw_v.f",
"max_stars_repo_stars_event_max_datetime": "2020-09-04T19:59:36.000Z",
"max_stars_repo_stars_event_min_datetime": "2015-06-08T13:09:28.000Z",
"num_tokens": 2050,
"path": null,
"reason": null,
"repo": null,
"save_path": null,
"sha": null,
"size": 5420
}
|
# coding: utf-8
from scipy import stats
import numpy as np
from itertools import chain
from scipy.stats import chi2_contingency
import jpegio as jio
import collections
img = jio.read('00576.jpg')
g = img.coef_arrays[0]
g = g.reshape(g.shape[0]*g.shape[1])
for ind in range(30):
    # slice bounds must be ints; the original used an undefined name `i`
    g1 = g[int(0.03 * len(g) * ind):int(0.03 * len(g) * (ind + 1))]
num = collections.Counter(g)
deg, cnt = zip(*num.items())
print(deg)
print(cnt)
t = 2**11
pairnum = int(t/2)
print(pairnum)
y = np.ones((pairnum, 1))
yy = np.ones((pairnum, 1))
deg = list(deg)
cnt = list(cnt)
o = []
for i in range(-1024, 1023, 2):
j = int(i/2)
if i in deg:
add = deg.index(i)
h1 = cnt[add]
else:
h1 = 0
if i+1 in deg:
add = deg.index(i+1)
h2 = cnt[add]
else:
h2 = 0
if h1+h2 > 0:
y[j] = (h1+h2)/2.0
yy[j] = h1
o.append([h1, h2])
else:
t = t-2
print(o)
t, p = stats.chisquare(yy, f_exp=y)
print(t)
print(p)
chi2, p, dof, ex = chi2_contingency(o, correction=False)
print(chi2)
print(p)
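# editor's sketch (not part of the original script): the test above is the
# classic chi-square "pair of values" attack -- full-capacity LSB embedding
# tends to equalize the counts h(2k) and h(2k+1), so comparing observed h1
# against expected (h1+h2)/2 gives a small statistic / large p-value on
# stego data. A synthetic demo of that idea:
def _pov_demo():
    rng = np.random.default_rng(0)
    clean = rng.geometric(0.3, size=10000)          # skewed: pair counts unequal
    stego = clean ^ rng.integers(0, 2, size=10000)  # random LSB flips equalize pairs
    for name, data in (("clean", clean), ("stego", stego)):
        h = np.bincount(data, minlength=32)[:32].astype(float)
        obs, exp = h[0::2], (h[0::2] + h[1::2]) / 2.0
        m = exp > 0
        chi = np.sum((obs[m] - exp[m]) ** 2 / exp[m])
        pval = stats.chi2.sf(chi, df=int(m.sum()) - 1)
        print(name, round(chi, 1), pval)  # stego: small chi, large p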
|
{
"alphanum_fraction": 0.5054667788,
"author": null,
"avg_line_length": 19.8166666667,
"converted": null,
"ext": "py",
"file": null,
"hexsha": "a172f44e60ace4b62441e510d614e2772206cd77",
"include": true,
"lang": "Python",
"length": null,
"llama_tokens": null,
"mathlib_filename": null,
"max_forks_count": null,
"max_forks_repo_forks_event_max_datetime": null,
"max_forks_repo_forks_event_min_datetime": null,
"max_forks_repo_head_hexsha": "b0f861a02ac222656a0c68ad01c904172f52afcd",
"max_forks_repo_licenses": [
"MIT"
],
"max_forks_repo_name": "everange-ustc/ImageSteganalysis",
"max_forks_repo_path": "chi_square_test.py",
"max_issues_count": null,
"max_issues_repo_head_hexsha": "b0f861a02ac222656a0c68ad01c904172f52afcd",
"max_issues_repo_issues_event_max_datetime": null,
"max_issues_repo_issues_event_min_datetime": null,
"max_issues_repo_licenses": [
"MIT"
],
"max_issues_repo_name": "everange-ustc/ImageSteganalysis",
"max_issues_repo_path": "chi_square_test.py",
"max_line_length": 60,
"max_stars_count": null,
"max_stars_repo_head_hexsha": "b0f861a02ac222656a0c68ad01c904172f52afcd",
"max_stars_repo_licenses": [
"MIT"
],
"max_stars_repo_name": "everange-ustc/ImageSteganalysis",
"max_stars_repo_path": "chi_square_test.py",
"max_stars_repo_stars_event_max_datetime": null,
"max_stars_repo_stars_event_min_datetime": null,
"num_tokens": 396,
"path": null,
"reason": "import numpy,from scipy",
"repo": null,
"save_path": null,
"sha": null,
"size": 1189
}
|
\documentclass{article}
\usepackage{bm}
\usepackage{amsmath}
\usepackage{graphicx}
\usepackage{mdwlist}
\usepackage[colorlinks=true]{hyperref}
\usepackage{geometry}
\geometry{margin=1in}
\geometry{headheight=2in}
\geometry{top=1in}
\usepackage{palatino}
\usepackage{listings}
\usepackage{color}
\definecolor{codegreen}{rgb}{0,0.6,0}
\definecolor{codegray}{rgb}{0.5,0.5,0.5}
\definecolor{codepurple}{rgb}{0.58,0,0.82}
\definecolor{backcolour}{rgb}{0.95,0.95,0.92}
\lstdefinestyle{mystyle}{
backgroundcolor=\color{backcolour},
commentstyle=\color{codegreen},
keywordstyle=\color{magenta},
numberstyle=\tiny\color{codegray},
stringstyle=\color{codepurple},
basicstyle=\footnotesize,
breakatwhitespace=false,
breaklines=true,
captionpos=b,
keepspaces=true,
numbers=left,
numbersep=5pt,
showspaces=false,
showstringspaces=false,
showtabs=false,
tabsize=2
}
\lstset{style=mystyle}
%\renewcommand{\rmdefault}{palatino}
\usepackage{fancyhdr}
%\pagestyle{fancy}
\rhead{}
\lhead{}
\chead{%
{\vbox{%
\vspace{4mm}
\large
Statement of Purpose \hfill
\\
Seoul National University
\\[2mm]
\textbf{GAO ZHIYUAN}
}
}
}
\usepackage{paralist}
\usepackage{todonotes}
\setlength{\marginparwidth}{2.15cm}
\usepackage{tikz}
\usetikzlibrary{positioning,shapes,backgrounds}
\begin{document}
\pagestyle{fancy}
%!TEX root = hw1.tex
%% Q1
\section{In relation to your academic interest and personal experiences, please describe your motivation for your desired course. You may include information related to your preparation for the course and related academic achievements. Please state your goals while studying at Seoul National University as well as your study plan(4000 bytes limit)}
I am applying to the undergraduate program in Computer Science and Engineering with a strong motivation for research. I have been an exchange student at Seoul National University for a year, and I believe enrolling as an undergraduate would be an enriching experience. \\
\\
I believe computer science can make a concrete and tangible impact on our surroundings. In 2015, I first enrolled in a department of political science in Taiwan. The philosophy and paradigms that political science focuses on were fascinating, and immersion in those intellectual challenges was always rigorous and diverting. \\
\\
However, I turned to computer science when I started a volunteering community in Taiwan devoted to computer education in a Malaysian aboriginal community. Amid the extreme poverty of the tropical jungles, the computer proved feasible as a method of education. The community continues to grow, and it also earned me an opportunity to speak at the openSUSE Asia Summit in Japan in 2017. I was fascinated by what computer science could achieve, and that seeded computer science in my mind as a profession. \\
\\
I believe computer science is my vocation. I started programming at university, but I was a fast learner. I became better acquainted with programming on an STM32 board with a Cortex-M core. At a basic level, we had to set up a server on an RTOS to monitor soil humidity and broadcast the collected data to all connected devices. I navigated the datasheets, and careful consideration was needed so that the sensor was activated accurately with exact timing control. It was my first exposure to GPIO, SPI, and other hardware components, and I found myself deeply immersed in the intense process of tweaking and developing a program. \\
\\
SNU's computer science undergraduate program looms large in my mind, largely because of its interdisciplinary approach to computer science. I was especially interested in the systems courses, such as compilers, system programming, and computer architecture. My dedication to programming in Taiwan earned me the opportunity to spend an exchange year at Seoul National University. I built a compiler from scratch in the compiler class, which gave me the chance to learn in detail what a compiler consists of and how such large projects should be organized. The operating systems course also benefited me greatly, with intensive Linux kernel projects that I now cite on my resume. I wish I had taken more courses during my exchange, since Hardware System Design and Principles and Practices of Software Development also drew my interest. Lectures in the Department of Computer Science gave me a glimpse of a higher realm, and those moments of great intensity were so intoxicating that I believe four years as an undergraduate would grant me glittering opportunities for a leap forward. \\
\\
As a long-term goal, I would like to be a researcher, and I find myself especially interested in compiler backends for deep neural networks. Deep learning has taken huge steps in the past decade, along with various applications on embedded devices, so the efficiency and portability of neural networks are gradually coming into discussion. Optimizing a neural network with respect to its compiler, in terms of code generation and instruction scheduling, explores a provocative but as yet scarcely studied field of compiler optimization. I was tremendously inspired by darknet, a deep learning framework written in C, and when I deployed tiny-yolov3 on a Raspberry Pi with darknet, I also found a bug in its model parsing. In brief, my research would relate to improving deep learning frameworks through compilation techniques.\\
\\
I am a fast language learner. My growing skill in Korean, together with my fluency in English, Japanese, and Chinese, makes me believe that I can quickly fit into classes here, where most major courses in the Computer Science department are taught in English, and that very soon I would comprehend the Korean needed for daily communication and lectures. \\
\\
\section{Please briefly state your academic and extracurricular activities(4000 bytes limit)}
I demonstrated a strong aptitude for science in high school, when I scored highest in math on the Chinese university entrance exam, which is considered the most influential, if not the only, factor by which universities evaluate applicants, and which is extremely competitive. In addition, I was the conductor of the high school orchestra, and I hold national certificates for trombone and tuba at the highest level. \\
\\
I am also the founder of a volunteer community in Taiwan. We have been contributing to Malaysian aboriginal communities, building connections between Taiwanese aboriginal volunteers and Malaysian communities. With continuous sponsorship from ASUS, the community is growing fast, in the belief that computer education is having an impact on their lives. Fortunately, this experience earned me the chance to attend the openSUSE Asia Summit 2017, one of the largest open source conferences, as a speaker and to exchange ideas with top developers in computer science. \\
\\
I am also the captain of the university speed roller club in Taiwan, where we routinely hold local competitions and collaborate with other universities. \\
\\
I was an intern at the Computer Systems and Platforms Laboratory at Seoul National University, where I aimed to propose a model for comparing and detecting source code plagiarism based on the abstract syntax trees I had learned about in the compiler class at SNU. I researched numerous previous works and papers, which greatly inspired my implementation. Weighing the conformance of different program scopes became a tradeoff, but eventually I arrived at an equation that yields satisfying judgements of source code plagiarism.\\
\\
In addition, I am a paid intern at the City Energy Lab in GSES, Seoul National University. I am contributing to a software refactoring that will integrate and improve two existing weather simulation packages---SURFEX and UWG. The code base is large and demanding, but I am gradually gaining comprehension of the software architecture, and I realize that the efficiency of the software can likely be improved substantially. \\
\\
Furthermore, I have been offered a paid internship at DYSK Labs, Taiwan, for the upcoming six months as a computer vision engineer. I expect my deep learning knowledge to develop further there, which should benefit my long-term research goal. \\
\\
My programming skills also earned me remote paid work for Success Factors, a company based in Spain, on Linux security and rootkit prevention. \\
\\
\section{Please write about yourself with regard to your characteristics other than your record of academic achievement. This section is provided to illustrate the personal aspects of each applicant. The following contents may be included in this section, though this section is not limited to them; experiences which have been influential in your life, individual perspectives on current issues, or role models or figures you respect(4000 bytes limit)}
I am a hacker, a thinker, a tinkerer, and a wonderer. \\
\\
Firstly, I was tremendously uplifted by Linus Torvalds, whose Linux kernel is one of the largest, best-known, and most widely used open source projects. It forms the very base of most modern operating system distributions, and Linux still holds the largest share of the embedded and server markets. With rapt attention and religious devotion, two years ago I dove into the source code for the first time, tracing the red-black tree and trying to understand the data structures. Thereafter the Linux kernel became my stepping stone to other glittering opportunities. I made a couple of diverting implementations that drew the interest of several recruiters, helped me find several opportunities as a programmer, and served as something I could capitalize upon. The Linux kernel taught me a code philosophy that retains both elegance and pragmatism, and it lit up my obsession with programming, with great care for beauty, security, pragmatism, and organization. \\
\\
Experience as a backpacker is the second thing that shaped me from the bottom up. For 45 days I strove to enjoy my survival in India, with wild dogs sleeping by my side from time to time and daily life elbowing me onto every form of transportation. Exposed to insecurity, fear, and the pure excitement of a new atmosphere, I started to wonder about the summits of existence and how we become morally valuable, each of us worthy of dignity and respect. And I started to see the beauty in people who were so open-hearted even though they had nothing pleasant to offer. It provoked my yearning for ideals, and that is the arduous journey I am determined to take.\\
\\
Thirdly, the exchange at Seoul National University drove that yearning more deeply into me: I am destined to pursue excellence, with an honest and unironic hunger for a prudent foresight into computer science. I was given a glimpse of mountaineering here, where the truth is my light. We tend to focus on cutting-edge research issues along with challenging assignments, and every time I take a class, it feels like an adventure into a fascinating realm in profusion. This exchange experience drove me to make a vital choice: to enroll at Seoul National University as an undergraduate and to be shaped from the bottom up. \\
\end{document}
|
{
"alphanum_fraction": 0.79400017,
"author": null,
"avg_line_length": 98.8823529412,
"converted": null,
"ext": "tex",
"file": null,
"hexsha": "226754f4afb46db6040c6bc2fbb6ecca1c58abe5",
"include": null,
"lang": "TeX",
"length": null,
"llama_tokens": null,
"mathlib_filename": null,
"max_forks_count": null,
"max_forks_repo_forks_event_max_datetime": null,
"max_forks_repo_forks_event_min_datetime": null,
"max_forks_repo_head_hexsha": "d96f3ad7743d68447b835d411a732f865cfe6ac1",
"max_forks_repo_licenses": [
"MIT"
],
"max_forks_repo_name": "alapha23/resume",
"max_forks_repo_path": "past_resume/sop.tex",
"max_issues_count": null,
"max_issues_repo_head_hexsha": "d96f3ad7743d68447b835d411a732f865cfe6ac1",
"max_issues_repo_issues_event_max_datetime": null,
"max_issues_repo_issues_event_min_datetime": null,
"max_issues_repo_licenses": [
"MIT"
],
"max_issues_repo_name": "alapha23/resume",
"max_issues_repo_path": "past_resume/sop.tex",
"max_line_length": 1099,
"max_stars_count": null,
"max_stars_repo_head_hexsha": "d96f3ad7743d68447b835d411a732f865cfe6ac1",
"max_stars_repo_licenses": [
"MIT"
],
"max_stars_repo_name": "alapha23/resume",
"max_stars_repo_path": "past_resume/sop.tex",
"max_stars_repo_stars_event_max_datetime": null,
"max_stars_repo_stars_event_min_datetime": null,
"num_tokens": 2456,
"path": null,
"reason": null,
"repo": null,
"save_path": null,
"sha": null,
"size": 11767
}
|
The original EleutherAI/proof-pile-2 dataset uses a custom Python script and .jsonl.zst files, which some versions of the datasets library struggle with.
This dataset contains the same data, subsets, and splits as EleutherAI/proof-pile-2, converted into standard parquet format.
Each subset and split was also shuffled so that you can directly train on the data without issue.
Conversion was performed using the following script:
import os
import zstandard as zstd
import json
import pandas as pd
from tqdm import tqdm
import datasets
import huggingface_hub as hf
DATA_URL = "EleutherAI/proof-pile-2"
SUBSETS = [
"algebraic-stack",
"arxiv",
"open-web-math"
]
SPLITS = [
"train",
"validation",
"test"
]
LOCAL_DIR = "./local_data/proof-pile-2"
OUT_URL = 'aklein4/proof-pile-2-fixed'
def download_data(
url: str,
subset: str,
split: str,
):
hf.snapshot_download(
repo_id=url,
repo_type="dataset",
allow_patterns=[f"{subset}/{split}/*"],
local_dir=LOCAL_DIR,
)
return os.path.join(LOCAL_DIR, subset, split)
def format_data(
url: str,
    subset: str,
split: str,
):
# download the data
folder = download_data(url, subset, split)
# get all files in the local dir
data_files = [
os.path.join(folder, f)
for f in os.listdir(folder)
if f.endswith(".zst")
]
# read all of the .jsonl.zst files
examples = []
for file_path in tqdm(data_files):
with zstd.open(open(file_path, "rb"), "rt", encoding="utf-8") as f:
for x in f.readlines():
examples.append(json.loads(x))
# get the dataset
df = pd.DataFrame(examples)
dataset = datasets.Dataset.from_pandas(df)
dataset = dataset.shuffle(seed=42)
dataset.push_to_hub(
OUT_URL,
config_name=subset,
split=split,
private=False
)
def main():
for subset in SUBSETS:
for split in SPLITS:
format_data(
DATA_URL,
subset,
split,
)
if __name__ == "__main__":
main()
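Once converted, any subset/split pair can be loaded directly from the hub; for example (a minimal sketch using the subset and split names listed above, and the text column from the original schema):
import datasets

# load one converted subset/split straight from the Parquet files on the hub
ds = datasets.load_dataset("aklein4/proof-pile-2-fixed", "arxiv", split="train")
print(ds[0]["text"][:200])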