| Column | Type | Min | Max |
| --- | --- | --- | --- |
| hexsha | stringlengths | 40 | 40 |
| size | int64 | 5 | 1.04M |
| ext | stringclasses | 6 values | |
| lang | stringclasses | 1 value | |
| max_stars_repo_path | stringlengths | 3 | 344 |
| max_stars_repo_name | stringlengths | 5 | 125 |
| max_stars_repo_head_hexsha | stringlengths | 40 | 78 |
| max_stars_repo_licenses | sequencelengths | 1 | 11 |
| max_stars_count | int64 | 1 | 368k |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24 | 24 |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24 | 24 |
| max_issues_repo_path | stringlengths | 3 | 344 |
| max_issues_repo_name | stringlengths | 5 | 125 |
| max_issues_repo_head_hexsha | stringlengths | 40 | 78 |
| max_issues_repo_licenses | sequencelengths | 1 | 11 |
| max_issues_count | int64 | 1 | 116k |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24 | 24 |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24 | 24 |
| max_forks_repo_path | stringlengths | 3 | 344 |
| max_forks_repo_name | stringlengths | 5 | 125 |
| max_forks_repo_head_hexsha | stringlengths | 40 | 78 |
| max_forks_repo_licenses | sequencelengths | 1 | 11 |
| max_forks_count | int64 | 1 | 105k |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24 | 24 |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24 | 24 |
| content | stringlengths | 5 | 1.04M |
| avg_line_length | float64 | 1.14 | 851k |
| max_line_length | int64 | 1 | 1.03M |
| alphanum_fraction | float64 | 0 | 1 |
| lid | stringclasses | 191 values | |
| lid_prob | float64 | 0.01 | 1 |
11950a823813531eb67b5c3eef900f8c63d9af1a
8,765
md
Markdown
content/blog/Docker/practice6.md
tghyyhjuujki/blog
41fa3e62b850187c391f461bbb615cd51a8e9f6d
[ "MIT" ]
null
null
null
content/blog/Docker/practice6.md
tghyyhjuujki/blog
41fa3e62b850187c391f461bbb615cd51a8e9f6d
[ "MIT" ]
2
2020-08-30T16:04:32.000Z
2021-09-07T07:08:37.000Z
content/blog/Docker/practice6.md
tghyyhjuujki/blog
41fa3e62b850187c391f461bbb615cd51a8e9f6d
[ "MIT" ]
null
null
null
---
title: Docker) Stack/Swarm Practice
date: 2020-08-07
category: Docker
draft: false
---

# Docker

## STACK

A unit that groups one or more services together.

- bridge network: a network that works on a single host only
- overlay network: a network that spans multiple hosts; service groups deployed with a stack belong here

<br/>

### Creating an overlay network

```powershell
$ docker network create --driver=overlay --attachable ch03 # create an overlay network named ch03
```

Remove the existing registry and run the yml:

```powershell
$ docker service rm my-nginx # remove the registry (its name would conflict)
$ mkdir ingress
$ cd ingress
$ mkdir simple-web
```

In the simple-web directory you just made, create the following files:

```powershell
$ vi Dockerfile # contents below
FROM node:slim
EXPOSE 8000
COPY hello.js .
CMD node hello.js

$ vi hello.js # contents below
var http = require('http');
var os = require("os");
var hostname = os.hostname();
var content = function(req, resp) {
    var ip = req.headers['x-forwarded-for'] ||
        req.connection.remoteAddress ||
        req.socket.remoteAddress ||
        req.connection.socket.remoteAddress;
    resp.writeHead(200); // write the headers before ending the response
    resp.end("Hello Docker and Swarm - " + ip + ", " + hostname);
}
var w = http.createServer(content);
w.listen(8000);
```

Go back to ingress and create the files:

```powershell
$ vi docker-compose.yml
version: "3"
services:
  registry:
    container_name: registry
    image: registry:latest
    ports:
      - 5000:5000
    volumes:
      - "./registry-data:/var/lib/registry"

$ vi ch03-webapi-stack.yml
version: "3"
services:
  nginx:
    image: gihyodocker/nginx-proxy
    deploy:
      replicas: 3 # number of replicas
      placement:
        constraints: [node.role != manager] # place only on non-manager nodes
    environment:
      BACKEND_HOST: simple-web_api:8000 # the first part is the service name we will create: simple-web is the stack name, api is the service name, and the port it runs on comes last
    depends_on:
      - api # nginx starts only after api is running
    networks:
      - ch03
    ports:
      - 80:80
  api:
    image: manager:5000/example/simple-web:latest # the api service is created from this image
    deploy:
      replicas: 3
      placement:
        constraints: [node.role != manager]
    networks:
      - ch03
networks:
  ch03:
    external: true
```

The files and folders at this point:

![image-20200720113753500](practice6.assets/image-20200720113753500.png)

Build the image and push it to the registry:

```powershell
$ docker build -t simple-web:latest . # build the image from the Dockerfile
$ docker tag simple-web manager:5000/example/simple-web # create an image named manager:5000/example/simple-web from simple-web
$ docker push manager:5000/example/simple-web # push it to the registry
```

Create the stack:

```powershell
$ docker stack deploy -c ch03-webapi-stack.yml simple-web
```

You can see that each service is created as stack name (simple-web) + service name (nginx, api):

![image-20200720113005083](practice6.assets/image-20200720113005083.png)

```powershell
$ docker stack ls # list stacks
$ docker stack services simple-web # list the services in a stack
$ docker stack ps simple-web # detailed view by stack name
$ docker service ps simple-web_ # check an individual service by name (stack name + _ + service name)
```

Redundancy test:

```powershell
$ curl -X GET http://192.168.56.13:80
# every time you run this, you can see a suitable replica on node1~3 being picked to serve the request
```

### Installing the Visualizer

```powershell
# from ingress
$ vi visualizer.yml
version: "3"
services:
  app:
    image: dockersamples/visualizer
    ports:
      - "9000:8080" # traffic arriving on 9000 is forwarded to 8080
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    deploy:
      mode: global
      placement:
        constraints: [node.role == manager] # place on the manager

$ docker stack deploy -c visualizer.yml visualizer # create the visualizer stack: [file name] [stack name]
```

### Now open [localhost:49000](http://localhost:49000) to see the visualizer
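Before opening the page, you can confirm the stack actually came up (a minimal sketch; the stack and service names assume the visualizer.yml above):

```powershell
$ docker stack services visualizer # the global service should report 1/1 on a single-manager swarm
$ docker service ps visualizer_app # "app" is the service name in visualizer.yml, so the full name is visualizer_app
```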
---

### Exercise

```dockerfile
FROM ubuntu:latest
RUN apt-get update && apt-get install -y -q nginx
COPY index.html /usr/share/nginx/html/
CMD ["nginx", "-g", "daemon off;"]
```

1. Build the above Dockerfile into an image and push it to the private registry (my-web).
2. Deploy it to the Docker swarm with 4 replicas, and change the setup so that all my-web instances are reached through nginx-proxy: the my-web service runs 4 replicas and the nginx-proxy service runs 2 replicas.

Solution. Since this is a private repository, it does not matter whether you build the image on node1~3 or on the manager:

```powershell
$ docker build -t test .
$ docker tag test manager:5000/example/test
$ vi ch03-webapi-stack.yml
version: "3"
services:
  nginx:
    image: gihyodocker/nginx-proxy
    deploy:
      replicas: 2 # number of replicas
      placement:
        constraints: [node.role != manager] # place only on non-manager nodes
    environment:
      BACKEND_HOST: test_my-web:80 # test is the stack name, my-web is the service name, and the port comes last
    depends_on:
      - my-web # nginx starts only after my-web is running
    networks:
      - ch03
    ports:
      - 80:80
  my-web:
    image: manager:5000/example/test:latest # the my-web service is created from this image
    deploy:
      replicas: 4
      placement:
        constraints: [node.role != manager]
    networks:
      - ch03
networks:
  ch03:
    external: true

$ docker push manager:5000/example/test
```

---

## Real-World Application Development with Swarm

On the manager:

```powershell
$ yum install -y tree # prints the tree under the current directory
$ tree
.
├── etc
│   └── mysql
│       ├── conf.d
│       └── mysql.conf.d
└── sql
```

Now copy everything under the tododb directory you cloned from Git:

![image-20200720170750826](practice6.assets/image-20200720170750826.png)

Edit the todo-mysql.yml file:

```yml
# todo-mysql.yml
version: "3"
services:
  master:
    image: registry:5000/ch04/tododb:latest
    deploy:
      replicas: 1
      placement:
        constraints: [node.role != manager]
    environment:
      MYSQL_ROOT_PASSWORD: gihyo
      MYSQL_DATABASE: tododb
      MYSQL_USER: gihyo
      MYSQL_PASSWORD: gihyo
      MYSQL_MASTER: "true"
    networks:
      - todoapp
networks:
  todoapp:
    external: true
```

Build and push:

```powershell
$ docker build -t tododb .
$ docker push manager:5000/example/tododb
$ curl -X GET http://manager:5000/v2/_catalog
$ docker network create --driver=overlay --attachable todoapp
$ docker stack deploy -c todo-mysql.yml todo-mysql
# but an error occurs
$ docker service logs [container ID] # check the error log
todo-mysql_master.1.e6aoqpodxskl@node3 | !! exec: "add-server-id.sh": executable file not found in $PATH
# it says the file cannot be found >> try running the image directly
# that errors out too, so comment out the ENTRYPOINT line in the Dockerfile and rebuild
$ docker build --no-cache -t tododb . # --no-cache skips the build cache: Docker normally caches each Dockerfile step to shorten image builds, and this rebuilds the image from scratch without reusing those cached layers
$ docker tag tododb manager:5000/example/tododb # tag it
$ docker push manager:5000/example/tododb # push it to the registry
# if another error occurs, check the log and add the following at the bottom of mysqld.conf
server-id=1
# run the build command above again, then run the image
$ docker run -d -p 3306:3306 -e MYSQL_ALLOW_EMPTY_PASSWORD=true manager:5000/example/tododb
```

Then another error appears:

![image-20200722102200635](practice6.assets/image-20200722102200635.png)

It says the master cannot be found. As prepare.sh shows:

![image-20200722102317686](practice6.assets/image-20200722102317686.png)

MYSQL_MASTER has to be set to true:

```powershell
$ docker run -d -p 3306:3306 -e MYSQL_MASTER=true -e MYSQL_ALLOW_EMPTY_PASSWORD=true manager:5000/example/tododb
```

Now mysql is at least up. So what was the problem? Look at the following Dockerfile:

![image-20200722103345730](practice6.assets/image-20200722103345730.png)

First, look at the path that add-server-id.sh is copied to:

```powershell
$ docker exec -it [mysql container ID] bash # open a shell in the mysql container
root@b53f452df94a:/usr/local/bin$ ls -l # inspect the add-server-id.sh file
```

![image-20200722103659491](practice6.assets/image-20200722103659491.png)

![image-20200722103751560](practice6.assets/image-20200722103751560.png)

As you can see, the file has no execute permission, and the other scripts will be missing it as well.
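You can also check those permission bits without keeping a container running by overriding the entrypoint (a minimal sketch; the image tag assumes the one built above):

```powershell
$ docker run --rm --entrypoint ls manager:5000/example/tododb -l /usr/local/bin
# scripts copied into the image without the execute bit show up as -rw-r--r--
```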
Go back to the manager and add the permissions:

```powershell
$ chmod +x add-server-id.sh
$ chmod +x init-data.sh
$ chmod +x prepare.sh
```

Then undo the server id we set temporarily to get mysql running:

```powershell
# in mysqld.conf, comment the line out: #server-id=1
# in the Dockerfile, uncomment everything around the ENTRYPOINT again
```

Add a slave to todo-mysql.yml:

```yml
  # the indentation must line up with master!!
  slave:
    image: manager:5000/example/tododb:latest
    deploy:
      replicas: 2
      placement:
        constraints: [node.role != manager]
    depends_on:
      - master
    environment:
      MYSQL_MASTER_HOST: master
      MYSQL_ROOT_PASSWORD: gihyo
      MYSQL_DATABASE: tododb
      MYSQL_USER: gihyo
      MYSQL_PASSWORD: gihyo
      MYSQL_REPL_USER: repl
      MYSQL_REPL_PASSWORD: gihyo
    networks:
      - todoapp
```

Rebuild and push again. Then find the master node and connect to it:

```powershell
$ docker exec -it [container ID] bash
root@296f74136813:/$ init-data.sh # load the tables
root@296f74136813:/$ mysql -uroot -p # password: gihyo
```

Now you can confirm that the tododb database is visible not only on the master node but also on the slaves (replication works).

- Master

![image-20200722132012558](practice6.assets/image-20200722132012558.png)

- Slave

![image-20200722132012558](practice6.assets/image-20200722132012558-1604908682254.png)
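To check the same thing from the shell rather than the screenshots (a sketch; the container ID is a placeholder and the credentials assume the gihyo values above):

```powershell
$ docker exec -it [slave container ID] mysql -ugihyo -pgihyo -e "SHOW DATABASES;"
# tododb should appear on the slave once replication is running
```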
21.535627
168
0.664461
kor_Hang
0.998675
119538f9865da683cda6748cca9f6a502fb78f19
12,412
md
Markdown
articles/active-directory/enterprise-users/groups-self-service-management.md
dbellt/azure-docs.es-es
d41bea42dcfdc1119c1a7a6465c6d0a0f04ba7bf
[ "CC-BY-4.0", "MIT" ]
null
null
null
articles/active-directory/enterprise-users/groups-self-service-management.md
dbellt/azure-docs.es-es
d41bea42dcfdc1119c1a7a6465c6d0a0f04ba7bf
[ "CC-BY-4.0", "MIT" ]
null
null
null
articles/active-directory/enterprise-users/groups-self-service-management.md
dbellt/azure-docs.es-es
d41bea42dcfdc1119c1a7a6465c6d0a0f04ba7bf
[ "CC-BY-4.0", "MIT" ]
null
null
null
---
title: Set up self-service group management in Azure Active Directory | Microsoft Docs
description: Create and manage security groups or Microsoft 365 groups in Azure Active Directory and request membership in those groups
services: active-directory
documentationcenter: ''
author: curtand
manager: daveba
editor: ''
ms.service: active-directory
ms.subservice: enterprise-users
ms.workload: identity
ms.topic: how-to
ms.date: 05/18/2021
ms.author: curtand
ms.reviewer: krbain
ms.custom: it-pro;seo-update-azuread-jan
ms.collection: M365-identity-device-management
ms.openlocfilehash: 47e3ea0a8ea5dc8dbb01d532a52436ed581311e7
ms.sourcegitcommit: 17345cc21e7b14e3e31cbf920f191875bf3c5914
ms.translationtype: HT
ms.contentlocale: es-ES
ms.lasthandoff: 05/19/2021
ms.locfileid: "110089904"
---
# <a name="set-up-self-service-group-management-in-azure-active-directory"></a>Set up self-service group management in Azure Active Directory

You can enable users to create and manage their own security groups or Microsoft 365 groups in Azure Active Directory (Azure AD). The group owner can approve or deny membership requests and can delegate control of group membership. Self-service group management features are not available for mail-enabled security groups or distribution lists.

## <a name="self-service-group-membership-defaults"></a>Self-service group membership defaults

When security groups are created in the Azure portal or with Azure AD PowerShell, only the group's owners can update membership. Security groups created by self-service in the [Access Panel](https://account.activedirectory.windowsazure.com/r#/joinGroups) and all Microsoft 365 groups are available for all users to join, whether owner-approved or auto-approved. In the Access Panel, you can change the membership options when you create the group.

| Groups created in | Security group default behavior | Microsoft 365 group default behavior |
| ------------------ | ------------------------------- | --------------------------------- |
| [Azure AD PowerShell](../enterprise-users/groups-settings-cmdlets.md) | Only owners can add members<br>Visible but not available to join in the Access Panel | Open to join for all users |
| [Azure portal](https://portal.azure.com) | Only owners can add members<br>Visible but not available to join in the Access Panel<br>Owner is not assigned automatically at group creation | Open to join for all users |
| [Access Panel](https://account.activedirectory.windowsazure.com/r#/joinGroups) | Open to join for all users<br>Membership options can be changed when the group is created | Open to join for all users<br>Membership options can be changed when the group is created |

## <a name="self-service-group-management-scenarios"></a>Self-service group management scenarios

* **Delegated group management** For example, an administrator who manages access to a SaaS application that the company uses. Managing these access rights is becoming complex, so this administrator asks the business owner to create a new group. The administrator assigns access for the application to the new group and adds to it all the people who already access the application. The business owner can then add more users, and those users are automatically provisioned to the application. The business owner doesn't need to wait for the administrator to manage access for users. If the administrator grants the same permission to a manager in a different business group, that person can also manage access for their own users. Neither the business owner nor the manager can view or manage each other's group memberships. The administrator can still see all users who have access to the application and block access rights if needed.

* **Self-service group management** For example, two users who both have SharePoint Online sites they set up independently. They want to give each other's teams access to their sites. To accomplish this, they can create a group in Azure AD, and in SharePoint Online each of them selects that group to provide access to their sites. When someone wants access, they request it from the Access Panel, and after approval they automatically get access to both SharePoint Online sites. Later, one of them decides that everyone accessing their site should also get access to a particular SaaS application. The administrator of the SaaS application can add access rights for the application to the SharePoint Online site. From then on, any approved requests give access to both SharePoint Online sites and to the SaaS application as well.

## <a name="make-a-group-available-for-user-self-service"></a>Make a group available for user self-service

1. Sign in to the [Azure AD admin center](https://aad.portal.azure.com) with an account that has been assigned the Global Administrator or Privileged Role Administrator role for the directory.

1. Select **Groups**, and then select the **General** settings.

   ![Azure Active Directory groups general settings](./media/groups-self-service-management/groups-settings-general.png)

1. Set **Owners can manage group membership requests in the Access Panel** to **Yes**.

1. Set **Restrict user ability to access groups features in the Access Panel** to **No**.

1. If you set **Users can create security groups in Azure portals, API or PowerShell** or **Users can create Microsoft 365 groups in Azure portals, API or PowerShell** to

   - **Yes**: all users in your Azure AD organization can create new security groups and add members to these groups in Azure portals, API, or PowerShell. These new groups also show up in the Access Panel for all other users. If the policy setting on the group allows it, other users can create requests to join these groups.
   - **No**: users can't create groups and can't change existing groups they own. However, they can manage the memberships of those groups and approve requests from other users to join them.

   These settings were changed recently to add support for the API and PowerShell. For more information about this change, see the [Groups setting change](#groups-setting-change) section below.

You can also use **Owners who can assign members as group owners in the Azure portal** to achieve more granular control over self-service group management for users.

When users can create groups, all users in your organization can create new groups and can then, as the default owner, add members to them. You can't specify individuals who can create their own groups. You can only specify individuals for making another group member a group owner.

> [!NOTE]
> An Azure Active Directory Premium (P1 or P2) license is required for users to request to join a security group or Microsoft 365 group and for owners to approve or deny membership requests. Without an Azure Active Directory Premium license, users can still manage their groups in the Access Panel, but they can't create a group that requires owner approval in the Access Panel, and they can't request to join a group.
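If you manage these settings from the command line, the same template values can be inspected with the Azure AD PowerShell cmdlets linked above (a minimal sketch, assuming the AzureADPreview module is installed and a "Group.Unified" settings object already exists in the tenant):

```powershell
# Connect, then read the tenant's unified group settings
Connect-AzureAD
$settings = Get-AzureADDirectorySetting | Where-Object { $_.DisplayName -eq "Group.Unified" }
$settings.Values   # lists name/value pairs such as EnableGroupCreation
```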
## <a name="groups-setting-change"></a>Groups setting change

The current security groups and Microsoft 365 groups settings are being deprecated and replaced. The current settings are being replaced because they control only group creation in the Azure portal and don't apply to the API or PowerShell. The new settings control group creation in the Azure portal as well as in the API and PowerShell.

| Deprecated setting | New setting |
| --- | --- |
| Users can create security groups in the Azure portal | Users can create security groups in Azure portals, API or PowerShell |
| Users can create Microsoft 365 groups in the Azure portal | Users can create Microsoft 365 groups in Azure portals, API or PowerShell |

Until the current settings are fully deprecated, both settings will appear in the Azure portal. You should configure the new setting before the end of **May 2021**. To configure the security group settings, you must be assigned the Global Administrator or Privileged Role Administrator role.

![Azure Active Directory security groups setting change](./media/groups-self-service-management/security-groups-setting.png)

The following table helps you decide which values to choose.

| If you want this: | Choose these values |
| --- | --- |
| Users can create groups using Azure portals, API or PowerShell | Set both settings to **Yes**. Changes can take up to 15 minutes to take effect. |
| Users **can't** create groups using Azure portals, API or PowerShell | Set both settings to **No**. Changes can take up to 15 minutes to take effect. |
| Users can create groups using Azure portals, but not using API or PowerShell | Not supported |
| Users can create groups using API or PowerShell, but not using Azure portals | Not supported |

The following table shows what happens for the different values of these settings. It isn't recommended to have the deprecated setting and the new setting set to different values.

| Users can create groups using Azure portals | Users can create groups using Azure portals, API or PowerShell | Effect on the tenant |
| :---: | :---: | --- |
| Yes | Yes | Users can create groups using Azure portals, API or PowerShell. Changes can take up to 15 minutes to take effect. |
| No | No | Users **can't** create groups using Azure portals, API or PowerShell. Changes can take up to 15 minutes to take effect. |
| Yes | No | Users **can't** create groups using Azure portals, API or PowerShell. It isn't recommended to set these settings to different values. Changes can take up to 15 minutes to take effect. |
| No | Yes | Until the **Users can create groups using Azure portals** setting is fully deprecated in **June 2021**, users can create groups using API or PowerShell, but not Azure portals. Starting sometime in **June 2021**, the **Users can create groups using Azure portals, API or PowerShell** setting will take effect and users will be able to create groups using Azure portals, API or PowerShell. |

## <a name="next-steps"></a>Next steps

These articles provide additional information on Azure Active Directory.

* [Manage access to resources with Azure Active Directory groups](../fundamentals/active-directory-manage-groups.md)
* [Azure Active Directory cmdlets for configuring group settings](../enterprise-users/groups-settings-cmdlets.md)
* [Application Management in Azure Active Directory](../manage-apps/what-is-application-management.md)
* [What is Azure Active Directory?](../fundamentals/active-directory-whatis.md)
* [Integrate your on-premises identities with Azure Active Directory](../hybrid/whatis-hybrid-identity.md)
112.836364
1,094
0.79399
spa_Latn
0.992659
11958cb8627faa21ba778216276c2170c40f1359
454
md
Markdown
markdown/org/docs/patterns/simone/options/collargap/en.md
MS-GAELsews/freesewing
c892c79aacba3172708d87262284463ca0c9423e
[ "MIT" ]
174
2018-08-25T13:46:07.000Z
2022-03-13T22:34:10.000Z
markdown/org/docs/patterns/simone/options/collargap/en.md
MS-GAELsews/freesewing
c892c79aacba3172708d87262284463ca0c9423e
[ "MIT" ]
1,029
2018-08-13T08:44:55.000Z
2022-03-31T20:35:42.000Z
markdown/org/docs/patterns/simone/options/collargap/en.md
MS-GAELsews/freesewing
c892c79aacba3172708d87262284463ca0c9423e
[ "MIT" ]
100
2018-09-18T18:11:38.000Z
2022-03-31T17:55:09.000Z
![Collar gap](collargap.svg)

Distance the collar sits apart when closed.

<Note>

This is really mostly a style choice, but a wider collar gap (together with the collar angle) can accommodate a wider tie (knot).

</Note>

## Effect of this option on the pattern

![This image shows the effect of this option by superimposing several variants that have a different value for this option](simone_collargap_sample.svg "Effect of this option on the pattern")
34.923077
191
0.77533
eng_Latn
0.999441
119598f08ba1d6bb718f8e31491641954309c50d
13,803
md
Markdown
howto6/working-with-lists-in-a-microflow.md
j3lte/docs
71c588fa362c1d91f9f89c9f221ad5c960a631b8
[ "CC-BY-4.0" ]
1
2016-10-07T06:48:33.000Z
2016-10-07T06:48:33.000Z
howto6/working-with-lists-in-a-microflow.md
ishan1729/docs
afaa9b86100beb83e8db9e1675bc9f18603dc6d2
[ "CC-BY-4.0" ]
null
null
null
howto6/working-with-lists-in-a-microflow.md
ishan1729/docs
afaa9b86100beb83e8db9e1675bc9f18603dc6d2
[ "CC-BY-4.0" ]
null
null
null
--- title: "Working With Lists in a Microflow" space: "Mendix 6 How-to's" category: "Logic & Business Rules" tags: [] --- In this how-to you will learn how to work with a list of objects in a Microflow. To manage this list you will first retrieve a filtered list of objects from the database. Mendix utilizes XPath constraints to apply filters. To learn more about XPath, take a look at this [documentation](/refguide6/xpath-contains). Secondly, you will iterate over the retrieved list of objects and calculate the total price of all the orders in a [Loop](/refguide6/loop). You will end this how-to with an alternative to calculating aggregated values over a list of objects. ## 1\. Preparing the Data Structure, GUI and Example Data To see the results of this how-to it is necessary that you setup a test project with test data. Before you continue, make sure that you know how to create: * **Domain models**, if you need more info, take a look at this [how-to](create-a-basic-data-layer). * **Overview and detail pages**, if you need more info, take a look at this [how-to](create-your-first-two-overview-and-detail-pages). * **Menu items**, if you need more info, take a look at this [how-to](setting-up-the-navigation-structure). 1. Create the following domain model: ![](attachments/18448705/18581378.png) 2. Create **overview** and **detail** pages to manage objects of type **Customer** and **Order**. 3. Create **menu items** to access the **Order** and the **Customer** overview pages. 4. Add the following customer data to your app: ![](attachments/18448705/18581374.png) 5. Add the following order data to your app: ![](attachments/18448705/18581373.png) ## 2\. Retrieving a Filtered List of Objects from the Database In the previous section you have set up a basic data structure and created some sample data. In this section you will retrieve all the 'Processing' orders. To achieve this you will add a microflow button to the 'Orders' overview. In this microflow you will add a 'Retrieve from database' 'Action activity' with an XPath constraint. The XPath constraint will filter the retrieved list to only the 'Invoiced' orders. 1. Add a new **microflow** to your module. _![](attachments/18448686/18581095.png)_ 2. Name the Microflow _IVK_SetOrderToComplete_. ![](attachments/18448686/18581093.png) 3. Save the new menu item by clicking **OK**. You should see an empty Microflow like this: ![](attachments/8784287/8946316.png) 4. Add a **Microflow button** to the toolbar of the orders overview and change its caption to _Set Processing to Complete_. ![](attachments/18448686/18581118.png) 5. Right click the Microflow button and click **Select Microflow** from the context menu. ![](attachments/18448686/18581098.png) 6. Select the **IVK_SetOrderToComplete** Microflow and click **select**. ![](attachments/18448686/18581054.png) 7. Open the **IVK_SetOrderToComplete** Microflow created in the first steps by double clicking at it in the Project Explorer. ![](attachments/18448686/18581092.png) You should see the empty Microflow again: ![](attachments/8784287/8946316.png) 8. Open the **Toolbox**. It should be on the bottom right of the Mendix Modeler. ![](attachments/8784287/8946802.png) 9. Drag a **Retrieve** action from the toolbox to the line between the green start and red end event. This inserts a retrieve action activity. ![](attachments/18448686/18581091.png) 10. Double click the retrieve activity to open its properties. ![](attachments/18448686/18581090.png) 11. Select **From database** as _source_ option. 
## 3\. Iterate Over a List of Objects

In the previous section you retrieved a list of orders with the status 'Processing'. In this section you will iterate over this list and change the status of each object individually to 'Complete'. To do so you will use a 'Loop' to iterate over the 'OrderProcessingList' and use the 'Change object' activity to change the status of the order object.

1. Open the **IVK_SetOrderToComplete** microflow created in the previous section.

   ![](attachments/18448686/18581087.png)

2. Drag a **Loop** action from the toolbox to the line, behind the **OrderProcessingList** action activity.

   ![](attachments/18448686/18581086.png)

   <div class="alert alert-info">
   For each object, the flow inside the loop is executed. The flow starts at the element that has no incoming sequence flows. A loop can contain all elements used in microflows, with the exception of start and stop events. Additionally, a loop (and only a loop) can contain break events and continue events. The iterator, which looks the same as an input object, represents the variable that holds one element of the list for each iteration. Beneath it the name of the variable is shown in black and the type of the variable in blue. For more information take a look at this [documentation](/refguide6/loop?utm_source=businessmodeler&utm_medium=software&utm_campaign=modeler).
   </div>

3. Double-click the loop activity and select the **OrderProcessingList** to iterate over.

   ![](attachments/18448686/18581085.png)

4. Drag a **Change object** activity inside the loop:

   ![](attachments/18448686/18581084.png)

5. Double-click the **change activity** to open its properties.

   ![](attachments/18448686/18581083.png)

6. Select the **IteratorOrder** in the **Variable** drop-down and click the **New** button.

   ![](attachments/18448686/18581082.png)

   You will see the following properties screen:

   ![](attachments/18448686/18581081.png)

7. Set the following properties:<br>
   a. For **Member**, select _Orderstatus_.<br>
   b. For **Value**, enter _MyFirstModule.OrderStatus.Complete_.<br>

   ![](attachments/18448686/18581080.png)

   Click **OK**. Your properties screen should look like this:

   ![](attachments/18448686/18581078.png)

   <div class="alert alert-warning">
   Set 'Commit' and 'Refresh in Client' to 'Yes' to commit your changes to the database and refresh your list in the client so your changes will be visible.
   </div>

8. Click **OK**. Your microflow should look like this:

   ![](attachments/18448686/18581076.png)

9. **Re-deploy** your application.

10. Click the **Set Processing to Complete** button. The orders with status 'Processing' will now be changed to 'Complete'.

    ![](attachments/18448686/18581113.png)
## 4\. Calculating a Total List Value Using a Variable and a Loop

In the previous section you iterated over a filtered list of objects using a 'Loop'. In this section you will use a loop to calculate the total sum of all your orders. To calculate this sum you will create a variable, which will be changed for every iteration in the loop.

1. Add an empty microflow to your order data grid and name it _IVK_CalculateTotalPriceOrders_.
2. Name the microflow button _Calculate total order price_. If you don't know how to do this, please look at steps 1-6 of section 2 of this how-to.

   ![](attachments/18448686/18581074.png)

3. Add a **Retrieve** activity for **Orders** and a loop for this list to the microflow.

   ![](attachments/18448686/18581106.png)

4. Drag a **Create variable** before the Orderslist.

   ![](attachments/18448686/18581073.png)

5. Double-click the **variable** to open its properties.

   ![](attachments/18448686/18581072.png)

6. Set the following properties:
   1. For **Data type**, select _Float/Currency_
   2. For **Value**, enter _0_
   3. For **Variable**, enter _CalculatedTotalPrice_

7. Add a **Change** variable inside the loop.

   <div class="alert alert-warning">
   It is not possible to drag an activity directly into a loop. So drag the activity outside the loop first, then drag it inside the loop.
   </div>

   ![](attachments/18448686/18581069.png)

8. Double-click the **change** variable activity to open its **properties**.

   ![](attachments/18448686/18581068.png)

9. Set the following properties:<br>
   a. For **Variable**, select _CalculatedTotalPrice_<br>
   b. For **Value**, enter _$CalculatedTotalPrice + $IteratorOrder/TotalPrice_<br>

   <div class="alert alert-info">
   By iterating over the list, the price of every order will be added one by one to the 'CalculatedTotalPrice' variable.
   </div>

   Your properties screen should look like this:

   ![](attachments/18448686/18581067.png)

10. Drag a **Show Message** action from the toolbox to the end of the microflow.

    ![](attachments/18448686/18581066.png)

11. Double-click the **message** activity to open its properties.

    ![](attachments/18448686/18581065.png)

12. Set the following properties:
    1. For **Template**, enter _Total calculated price: {1}._
    2. For **Parameters**, add _toString($CalculatedTotalPrice)_.

    ![](attachments/18448686/18581064.png)

13. Click **OK**. You should see a microflow like this:

    ![](attachments/18448686/18581063.png)

14. **Re-deploy** your application.
15. Click the **Calculate total order price** button and you will see the price of all the orders added up.

    ![](attachments/18448686/18581103.png)

## 5\. Calculate a Total List Value Using an Aggregate Function

In the previous section you iterated over a list to add the value of each single object to a total price variable. In this section you will use the 'Aggregate list' function to calculate the total price instead of using a loop. The aggregate list can be used to calculate aggregated values such as the maximum, minimum, sum, average and total amount of objects over a list of objects.

1. Open the **IVK_CalculateTotalPriceOrders** microflow and remove the loop and the **CalculatedTotalPrice** variable.

   ![](attachments/18448686/18581062.png)

2. Add an **Aggregate list** activity after the **Orderlist**.

   ![](attachments/18448686/18581061.png)

3. Double-click the aggregate list activity to open its properties.

   ![](attachments/18448686/18581060.png)

4. Set the following properties:<br>
   a. For **Variable**, select _OrderList_.<br>
   b. For **Function**, select _Sum_.<br>
   c. For **Attribute**, select _TotalPrice_.<br>
   d. For **Variable**, enter a descriptive name like _SumTotalPrice_.<br>

   <div class="alert alert-info">
   See the [documentation](/refguide6/aggregate-list) for the description of the other functions.
   </div>

   ![](attachments/18448686/18581059.png)

5. Click **OK**.
6. Double-click the message activity.

   ![](attachments/18448686/18581056.png)

7. Replace the **$CalculatedTotalPrice** variable in the **Parameters Expression** with the **$SumTotalPrice** variable.

   ![](attachments/18448686/18581055.png)

8. Click **OK**. Your microflow should look like this:

   ![](attachments/18448686/18581058.png)

9. Re-deploy your application.
10. Click the **Calculate total order price** button and you will see the same price of all the orders added up.

    ![](attachments/18448686/18581103.png)

## 6\. Filter List of Orders on the City of the Associated Customers

In the previous sections you filtered the list of orders from the database on attributes of the order entity itself. In this section you will constrain on attributes over the associated customer object. In the example of this section you will set the order status for all customers in Rotterdam to the status 'Complete'.

1. Open the microflow **IVK_SetOrderToComplete**.

   ![](attachments/18448686/18581112.png)

2. Open the **OrderProcessingList** activity.
3. Add an **XPath constraint** over the association to customer, constraining on the city (Rotterdam) of this customer, as sketched below these steps.

   ![](attachments/18448686/18581111.png)

4. Click **OK** and **re-deploy** your application.
5. Open the application in the browser.
6. Click the **Set Processing to Complete** button. All the orders from customers in Rotterdam are set to **Complete**.

   ![](attachments/18448686/18581110.png)
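Such a constraint follows the association path from Order to Customer (a sketch; the module, association, and attribute names are assumptions based on the domain model above):

```
[MyFirstModule.Order_Customer/MyFirstModule.Customer/City = 'Rotterdam']
```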
## 7\. Related content

* [Defining access rules using XPath](define-access-rules-using-xpath)
* [Extending Your Application with Custom Java](extending-your-application-with-custom-java)
* [Working With Lists in a Microflow](working-with-lists-in-a-microflow)
* [Triggering Logic using Microflows](triggering-logic-using-microflows)
* [Creating a Custom Save Button](create-a-custom-save-button)
* [Optimizing Retrieve Activities](optimizing-retrieve-activities)
* [Error Handling](set-up-error-handling)
* [Optimizing Microflow Aggregates](optimizing-microflow-aggregates)
* [Extract and use sub microflows](extract-and-use-sub-microflows)
* [XPath](/refguide6/xpath)
* [XPath Constraints](/refguide6/xpath-constraints)
* [Aggregate List](/refguide6/aggregate-list)
55.657258
555
0.737303
eng_Latn
0.966871
1195e633885d90499bd28d15c369e137a6704a44
45
md
Markdown
README.md
ryan-a-s/covid-tracker
dc0c5a2d1cc5763a077d3251260dc484c44f9276
[ "MIT" ]
null
null
null
README.md
ryan-a-s/covid-tracker
dc0c5a2d1cc5763a077d3251260dc484c44f9276
[ "MIT" ]
null
null
null
README.md
ryan-a-s/covid-tracker
dc0c5a2d1cc5763a077d3251260dc484c44f9276
[ "MIT" ]
null
null
null
# covid-tracker

COVID-19 Tracker Application
15
28
0.822222
kor_Hang
0.470651
119647174db2fed7c93cfd2337030e2f867bf67f
5,003
md
Markdown
livingdoc-typescript-plugin/README.md
A-Wiedemann/living-documentation
3925275e280b7e94bda7b05b9155c0ad037fccbb
[ "Apache-2.0" ]
26
2017-05-16T07:07:37.000Z
2022-02-23T01:57:05.000Z
livingdoc-typescript-plugin/README.md
A-Wiedemann/living-documentation
3925275e280b7e94bda7b05b9155c0ad037fccbb
[ "Apache-2.0" ]
15
2017-05-09T16:03:28.000Z
2021-12-09T19:46:37.000Z
livingdoc-typescript-plugin/README.md
A-Wiedemann/living-documentation
3925275e280b7e94bda7b05b9155c0ad037fccbb
[ "Apache-2.0" ]
6
2017-05-16T14:52:45.000Z
2021-10-05T21:19:54.000Z
# livingdoc-typescript-plugin

Living documentation plugin for TypeScript.

[![npm Version](https://img.shields.io/npm/v/livingdoc-typescript-plugin.svg)](https://www.npmjs.com/package/livingdoc-typescript-plugin)
[![build-status](https://travis-ci.org/jboz/living-documentation.svg?branch=master)](https://travis-ci.org/jboz/livingdoc-typescript-plugin)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://github.com/feross/standard)

## Usage

### Install

```shell
npm install --global livingdoc
```

### No installation

```shell
npx livingdoc ...
```

### Generate classes diagram

```bash
livingdoc-typescript-plugin diagram -i src\domain\**\*.ts -o dist\domain-classes.svg
```

Result example:

<img src="./docs/diagram.svg">

### Generate glossary

```bash
livingdoc-typescript-plugin glossary -i src\domain\**\*.ts -o dist\glossary.md
```

Result example:

| ObjectName | Attribute name | Type | Description |
| --- | --- | --- | --- |
| [Access](Access) | | | |
| | [phoneNumber](phoneNumber) | string | |
| | [price](price) | number | |
| | [dateTime](dateTime) | string | |
| [Bill](Bill) | | | @RootAggregate<br>Monthly bill. |
| | [month](month) | string | Which month of the bill. |
| | [contract](contract) | Contract | Contract concerned by the bill. |
| | [accesses](accesses) | Access[] | Bill contents. |
| | [paymentState](paymentState) | PaymentState | Bill payment state |
| [BillsService](BillsService) | | | |
| [CallAccess](CallAccess) | | | |
| | [duration](duration) | string | |
| [Contract](Contract) | | | Telecom contract |
| | [id](id) | number | Contract identifier.<br>Generated by the system and communicated to the client. |
| | [customer](customer) | Customer | Contract customer. |
| [Customer](Customer) | | | Customer of the telecom service |
| | [email](email) | string | Email of the customer. |
| | [contracts](contracts) | Contract[] | Customer's contracts. |
| [PaymentState](PaymentState) | | Enumeration | Bill payment state values. |
| | [WAITING](WAITING) | | Waiting for payment by the client. |
| | [DONE](DONE) | | Client has paid. |
| [SmsAccess](SmsAccess) | | | |

## Options

### -i, --input <path>

Define the path of the TypeScript files.

### -o, --output <path>

Define the path of the output file. If not defined, the result is written to STDOUT.

### -d, --deep <boolean>

Indicate whether the program must walk through the dependencies' content or not.
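If you generate the documentation regularly, the two commands above can live in your project's npm scripts (a minimal sketch; the input and output paths mirror the examples above and are assumptions about your layout):

```json
{
  "scripts": {
    "doc:diagram": "livingdoc-typescript-plugin diagram -i src/domain/**/*.ts -o dist/domain-classes.svg",
    "doc:glossary": "livingdoc-typescript-plugin glossary -i src/domain/**/*.ts -o dist/glossary.md"
  }
}
```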
64.141026
155
0.329602
eng_Latn
0.358053
119704b5db1c23ccdd7932391b8cd9fa21060a85
1,311
md
Markdown
docs/visual-basic/misc/bc42328.md
jhonyfrozen/docs.pt-br
c9e86b6a5de2ff8dffd54dd64d2e87aee85a5cb8
[ "CC-BY-4.0", "MIT" ]
null
null
null
docs/visual-basic/misc/bc42328.md
jhonyfrozen/docs.pt-br
c9e86b6a5de2ff8dffd54dd64d2e87aee85a5cb8
[ "CC-BY-4.0", "MIT" ]
null
null
null
docs/visual-basic/misc/bc42328.md
jhonyfrozen/docs.pt-br
c9e86b6a5de2ff8dffd54dd64d2e87aee85a5cb8
[ "CC-BY-4.0", "MIT" ]
null
null
null
---
title: The 'AddressOf' expression has no effect in this context because the method argument to 'AddressOf' requires a relaxed conversion to the delegate type of the event
ms.date: 07/20/2015
f1_keywords:
- vbc42328
- bc42328
helpviewer_keywords:
- BC42328
ms.assetid: 1561e5bf-b8ab-4a67-990d-c3a76e2ab353
ms.openlocfilehash: de042a7a4fcafce14066485ae6be58e1914dfea8
ms.sourcegitcommit: 9b552addadfb57fab0b9e7852ed4f1f1b8a42f8e
ms.translationtype: MT
ms.contentlocale: pt-BR
ms.lasthandoff: 04/23/2019
ms.locfileid: "61912737"
---
# <a name="the-addressof-expression-has-no-effect-in-this-context-because-the-method-argument-to-addressof-requires-a-relaxed-conversion-to-the-delegate-type-of-the-event"></a>The 'AddressOf' expression has no effect in this context because the method argument to 'AddressOf' requires a relaxed conversion to the delegate type of the event

## <a name="to-correct-this-error"></a>To correct this error

1. Assign the 'AddressOf' expression to a variable and use the variable to add or remove the method as the handler.

## <a name="see-also"></a>See also

- [AddressOf Operator](../../visual-basic/language-reference/operators/addressof-operator.md)
- [Events (Visual Basic)](~/docs/visual-basic/programming-guide/language-features/events/index.md)
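The fix keeps a single delegate instance around so the same reference is used both to add and to remove the handler (a minimal sketch; the event, delegate type, and method names are hypothetical):

```vb
' Bind AddressOf to a delegate variable once...
Dim handler As EventHandler = AddressOf OnSomethingHappened
' ...then use that same variable with AddHandler and RemoveHandler,
' so both operate on the same delegate instance.
AddHandler source.SomethingHappened, handler
RemoveHandler source.SomethingHappened, handler
```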
48.555556
339
0.789474
por_Latn
0.927032
11975bb36cbcdcd0b01712c3ec811a6769b11520
3,750
md
Markdown
components/automate-deployment/README.md
mskdenigma/automate
03d67544eac7bca5a58929bad05a6d7cd3e59b54
[ "Apache-2.0" ]
1
2020-10-19T20:20:08.000Z
2020-10-19T20:20:08.000Z
components/automate-deployment/README.md
mskdenigma/automate
03d67544eac7bca5a58929bad05a6d7cd3e59b54
[ "Apache-2.0" ]
null
null
null
components/automate-deployment/README.md
mskdenigma/automate
03d67544eac7bca5a58929bad05a6d7cd3e59b54
[ "Apache-2.0" ]
null
null
null
# Automate Deployment

The deployment-service and automate-cli are responsible for the installation and management of Chef Automate. This includes:

- initial installation
- Automate 1 to Automate 2 migrations
- configuration changes
- automatic and manual Automate 2 upgrades
- backup and restore
- system diagnostics

This directory contains the deployment-service -- a long-running service that provides gRPC APIs to accomplish the above tasks. The [automate-cli](../automate-cli) component contains the chef-automate command, which provides the user interface to the deployment-service's capabilities.

# Development

## Development Environments

For most day-to-day development, you can use the Habitat studio-based development environment described in our [developer documentation](../../dev-docs/DEV_ENVIRONMENT).

Most new development features and tests should be designed to work in the studio if possible.

We also try to ensure that the build, unit tests, and linter work from outside the studio, using the Makefile directly:

- `make build`
- `make unit`
- `make lint`
- `make fmt`

Because the deployment-service is responsible for OS setup and bootstrapping, we also have specialized development environments described in a section below.

## Testing

New features and bug fixes require tests. We are not dogmatic about the kind of test you should write. Write a test that:

- Will break if the feature breaks;
- Can be integrated into CI; and
- Can be run by developers with a level of effort proportional to the confidence the test gives.

We try to test at 2 levels:

- "unit" tests: `make unit`

  These tests are written using the standard Go testing tools. You can run them with `make unit` from the current directory. While we currently have substantial mocking in our unit tests, our goal is to reduce and avoid mocking over time. Unit tests that talk to databases and filesystems are absolutely fine provided they run quickly and reliably.

- Integration tests

  Because deployment-service is focused on interactions with the underlying operating system and Habitat service supervisor, integration tests are our gold standard for whether a feature works. We have an [integration test framework](../../integration) that runs a complete A2 installation inside a docker container. From there, we write tests in Go using the [`chef-automate diagnostics`](../automate-cli/pkg/diagnostics) command or directly in Bash.

## Specialized Development Environment

### Vagrant based deployment-service development environment

Because the deployment-service must deal with the underlying operating system configuration, we have custom development environments in addition to the Habitat studio development. The vagrant environment automatically mounts the automate directory to `/a2`. This allows you to deploy and test locally built artifacts in an environment more similar to the systems customers will be using.

To get started:

```
host> # build any required components for testing via the habitat studio
host> cd A2_ROOT/components/automate-deployment
host> make linux
host> vagrant up
host> vagrant ssh
vagrant> sudo -i
vagrant> cd /a2/components/automate-deployment
vagrant> make run
```

### A1 to A2 test environments

Most A1 to A2 migration development and testing can be done in the studio or the vagrant-based test environment using automate-cli's `--self-test` flag:

```
chef-automate migrate-from-v1 --self-test
```

In some cases, it is necessary to test the real migration code paths without the mock interfaces used by self-test. To facilitate this, the `a1migration` directory has a docker-based test environment suited for that purpose. See the README in that directory for more details.
31.779661
72
0.787467
eng_Latn
0.998827
11976671f9ca7211354a5f78acfbc4ab3462074a
3,288
md
Markdown
articles/sentinel/connect-forcepoint-casb-ngfw.md
changeworld/azure-docs.pt-pt
8a75db5eb6af88cd49f1c39099ef64ad27e8180d
[ "CC-BY-4.0", "MIT" ]
null
null
null
articles/sentinel/connect-forcepoint-casb-ngfw.md
changeworld/azure-docs.pt-pt
8a75db5eb6af88cd49f1c39099ef64ad27e8180d
[ "CC-BY-4.0", "MIT" ]
null
null
null
articles/sentinel/connect-forcepoint-casb-ngfw.md
changeworld/azure-docs.pt-pt
8a75db5eb6af88cd49f1c39099ef64ad27e8180d
[ "CC-BY-4.0", "MIT" ]
null
null
null
---
title: Connect Forcepoint products to Azure Sentinel | Microsoft Docs
description: Learn how to connect Forcepoint products to Azure Sentinel.
services: sentinel
author: yelevin
editor: ''
ms.service: azure-sentinel
ms.subservice: azure-sentinel
ms.devlang: na
ms.topic: conceptual
ms.tgt_pltfrm: na
ms.workload: na
ms.date: 02/20/2020
ms.author: yelevin
ms.openlocfilehash: eb099a786a84f9b7d0a6f0dc6e6df9c3459af295
ms.sourcegitcommit: 2ec4b3d0bad7dc0071400c2a2264399e4fe34897
ms.translationtype: MT
ms.contentlocale: pt-PT
ms.lasthandoff: 03/28/2020
ms.locfileid: "77588234"
---
# <a name="connect-your-forcepoint-products-to-azure-sentinel"></a>Connect your Forcepoint products to Azure Sentinel

> [!IMPORTANT]
> The Forcepoint products data connector in Azure Sentinel is currently in public preview. This feature is provided without a service level agreement, and it's not recommended for production workloads. Certain features might not be supported or might have constrained capabilities. For more information, see [Supplemental Terms of Use for Microsoft Azure Previews](https://azure.microsoft.com/support/legal/preview-supplemental-terms/).

This article explains how to connect your Forcepoint products to Azure Sentinel. The Forcepoint data connectors let you connect Forcepoint Cloud Access Security Broker and Next Generation Firewall logs with Azure Sentinel. This way, you can automatically export user-defined logs to Azure Sentinel in real time. The connector gives you enriched visibility into user activities recorded by Forcepoint products. It also enables further correlation with data from Azure workloads and other feeds, and improves monitoring capability with Workbooks inside Azure Sentinel.

> [!NOTE]
> Data will be stored in the geographic location of the workspace in which you are running Azure Sentinel.

## <a name="forward-forcepoint-product-logs-to-the-syslog-agent"></a>Forward Forcepoint product logs to the Syslog agent

Configure the Forcepoint product to forward Syslog messages in CEF format to your Azure workspace via the Syslog agent.

1. Set up the Forcepoint product for integration with Azure Sentinel as described in the following installation guides:

   - [Forcepoint CASB Integration Guide](https://frcpnt.com/casb-sentinel)
   - [Forcepoint NGFW Integration Guide](https://frcpnt.com/ngfw-sentinel)

2. Search for CommonSecurityLog to use the relevant schema in Log Analytics, where the Device Vendor name contains 'Forcepoint'.

3. Continue to [STEP 3: Validate connectivity](connect-cef-verify.md).

## <a name="next-steps"></a>Next steps

In this document, you learned how to connect Forcepoint products to Azure Sentinel. To learn more about Azure Sentinel, see the following articles:

- Learn how to [get visibility into your data and potential threats](quickstart-get-visibility.md).
- Get started [detecting threats with Azure Sentinel](tutorial-detect-threats-built-in.md).
- [Use workbooks](tutorial-monitor-your-data.md) to monitor your data.
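Step 2 amounts to a simple Log Analytics query (a minimal sketch in KQL; the filter mirrors the 'Forcepoint' vendor-name condition described above):

```kusto
CommonSecurityLog
| where DeviceVendor contains "Forcepoint"
| take 10
```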
55.728814
574
0.805657
por_Latn
0.995704
1198345175d1a35136ddd4f08491cf6f7b6e8fab
615
md
Markdown
VBA/Word-VBA/articles/shadowformat-style-property-word.md
oloier/VBA-content
6b3cb5769808b7e18e3aff55a26363ebe78e4578
[ "CC-BY-4.0", "MIT" ]
584
2015-09-01T10:09:09.000Z
2022-03-30T15:47:20.000Z
VBA/Word-VBA/articles/shadowformat-style-property-word.md
oloier/VBA-content
6b3cb5769808b7e18e3aff55a26363ebe78e4578
[ "CC-BY-4.0", "MIT" ]
585
2015-08-28T20:20:03.000Z
2018-08-31T03:09:51.000Z
VBA/Word-VBA/articles/shadowformat-style-property-word.md
oloier/VBA-content
6b3cb5769808b7e18e3aff55a26363ebe78e4578
[ "CC-BY-4.0", "MIT" ]
590
2015-09-01T10:09:09.000Z
2021-09-27T08:02:27.000Z
---
title: ShadowFormat.Style Property (Word)
keywords: vbawd10.chm164364395
f1_keywords:
- vbawd10.chm164364395
ms.prod: word
api_name:
- Word.ShadowFormat.Style
ms.assetid: b25b4e5b-8123-a04f-c55e-5fb037e20df1
ms.date: 06/08/2017
---

# ShadowFormat.Style Property (Word)

Returns or sets a **MsoShadowType** that represents the type of shadow formatting to apply to a shape. Read/write.

## Syntax

_expression_ . **Style**

_expression_ An expression that returns a **[ShadowFormat](shadowformat-object-word.md)** object.

## See also

#### Concepts

[ShadowFormat Object](shadowformat-object-word.md)
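The property is reached through a shape's **Shadow** member (a minimal sketch; it assumes the active document contains at least one shape):

```vb
Sub ShowShadowStyle()
    ' Report the shadow style value of the first shape
    ' in the active document.
    Dim sf As ShadowFormat
    Set sf = ActiveDocument.Shapes(1).Shadow
    MsgBox "Shadow style value: " & sf.Style
End Sub
```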
18.088235
115
0.75122
eng_Latn
0.616119
1198b3d0bf9a5cbc66a605c4b936241c0ed3481c
1,086
md
Markdown
collections/_pcs/Maphismo.md
errinlarsen/barrowmaze
d134dd52f41af8272df68c3f03a48302a5420f19
[ "Unlicense" ]
1
2020-12-09T04:51:59.000Z
2020-12-09T04:51:59.000Z
collections/_pcs/Maphismo.md
errinlarsen/barrowmaze
d134dd52f41af8272df68c3f03a48302a5420f19
[ "Unlicense" ]
18
2020-12-08T15:38:30.000Z
2020-12-10T15:40:02.000Z
collections/_pcs/Maphismo.md
errinlarsen/barrowmaze
d134dd52f41af8272df68c3f03a48302a5420f19
[ "Unlicense" ]
2
2020-12-09T04:50:19.000Z
2020-12-09T04:50:54.000Z
---
author: Maphismo
title: Maphismo
pcid: Maphismo
layout: single
date: 2020-09-10 20:23:29 -0700
excerpt: Cleric 1 (played by Adam) DEAD
header:
  teaser: /assets/images/PC-MaphismoPortrait-thumb.png
author_profile: true
---

{% assign pc = site.data.pcs[page.pcid] %}

### {{ pc.class }} {{ pc.level }}

**Current Location:** {{ pc.location }}

**Current XP:** {{ pc.xp }}

![Maphismo (_Adam_)](/assets/images/PC-Maphismo.2020.09.22.jpg)

## Posts

{% if paginator %}
  {% assign posts = paginator.posts %}
{% else %}
  {% assign posts = site.posts %}
{% endif %}

{% assign entries_layout = page.entries_layout | default: 'list' %}
{% assign filtered_posts = posts | where: 'author', page.author %}

<div class="entries-{{ entries_layout }}">
  {% for post in filtered_posts %}
    {% include archive-single.html type=entries_layout %}
  {% endfor %}
</div>

{% include paginator.html %}

<!-- {% assign filtered_posts = site.posts | where: 'author', page.author %} -->
<!-- {% for post in filtered_posts %} -->
<!-- - [{{ post.title }}]({{ post.url }}) -->
<!-- {% endfor %} -->
25.255814
80
0.626151
eng_Latn
0.640997
1199b0059b53f8bb0963ad072176ca6d15eb6a0e
80
md
Markdown
README.md
Anduin-Zhu/Test_framework
4d8fd1da395b72274d6a429ad6a29018a394f2a3
[ "Apache-2.0" ]
null
null
null
README.md
Anduin-Zhu/Test_framework
4d8fd1da395b72274d6a429ad6a29018a394f2a3
[ "Apache-2.0" ]
null
null
null
README.md
Anduin-Zhu/Test_framework
4d8fd1da395b72274d6a429ad6a29018a394f2a3
[ "Apache-2.0" ]
null
null
null
## Environment

- Python 3
- selenium 2.53
- PyYaml
- xlrd
- requests
- JMESPath
- Faker
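A minimal sketch of installing these dependencies with pip (the PyPI package names are assumptions mapped from the list above):

```shell
pip install "selenium==2.53.0" PyYAML xlrd requests jmespath Faker
```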
8
15
0.6375
kor_Hang
0.385794
119a371b0a791afd9fea0608e2dd76e9b941f2bc
40
md
Markdown
README.md
marlonbymendes/github-oauth-example
127d799b0209080db82b5a04953a269a5cf09dea
[ "MIT" ]
null
null
null
README.md
marlonbymendes/github-oauth-example
127d799b0209080db82b5a04953a269a5cf09dea
[ "MIT" ]
null
null
null
README.md
marlonbymendes/github-oauth-example
127d799b0209080db82b5a04953a269a5cf09dea
[ "MIT" ]
null
null
null
# Django example for Log in with Github
20
39
0.775
eng_Latn
0.999856
119ad4836661045790930e9db48b2f954a2d5a77
1,501
md
Markdown
README.md
alexa-samples/apl-video-demo
796332b9f491e9378ca65242804b1b71813f6e6a
[ "Apache-2.0" ]
null
null
null
README.md
alexa-samples/apl-video-demo
796332b9f491e9378ca65242804b1b71813f6e6a
[ "Apache-2.0" ]
1
2021-11-15T03:39:42.000Z
2021-11-15T03:39:42.000Z
README.md
alexa-samples/apl-video-demo
796332b9f491e9378ca65242804b1b71813f6e6a
[ "Apache-2.0" ]
null
null
null
# APL Video Demo

<img src="https://m.media-amazon.com/images/G/01/mobile-apps/dex/alexa/alexa-skills-kit/tutorials/quiz-game/header._TTH_.png" />

This tutorial demonstrates how to implement a skill that plays a video using APL (Alexa Presentation Language) and how to voice control the playback with commands such as _play_ and _pause_.

## What You Will Need

* [Amazon Developer Account](http://developer.amazon.com/alexa)
* [Amazon Web Services Account](http://aws.amazon.com/)
* [ASK CLI](https://developer.amazon.com/docs/smapi/quick-start-alexa-skills-kit-command-line-interface.html)
* The sample code on [GitHub](https://github.com/alexa-samples/apl-video-demo).

## Setting Up the Demo

This folder contains the (1) interaction model, (2) APL document and (3) skill code. It is structured to make it easy to deploy if you have the ASK CLI already set up. If you would like to use the Developer Portal, you can

1. follow the steps outlined in the [Hello World](https://github.com/alexa/skill-sample-nodejs-hello-world) example
1. substitute the [Model](./models/en-US.json)
1. create an `./lambda/custom/apl` folder
1. add [video.json](./lambda/custom/apl/video.json) to the `apl` folder
1. add [skill code](./lambda/custom/index.js) to `./lambda/custom/`.

## Running the Demo

**You**: _"alexa, open video demo."_

**Alexa**: _"Welcome to the APL Video Demo! You can control the video playback by saying play or pause."_

**You**:

- _"pause"_
- _"pause video"_
- _"play"_
- _"play video"_
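If you deploy with the ASK CLI instead of the Developer Portal, the flow is typically a single command from the repository root (a sketch; it assumes the CLI has already been configured with your developer account):

```shell
# from the repository root, with the ASK CLI configured
ask deploy   # creates or updates the skill, interaction model, and Lambda
```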
42.885714
181
0.734177
eng_Latn
0.869268
119be01c61513792b7cd8607940a205af1448155
10,824
md
Markdown
articles/azure-resource-manager/resource-group-template-deploy-rest.md
CatchRetry/azure-docs.fr-fr
1ccd071caa483cc19d4d9b8c1c59104b1a7e6438
[ "CC-BY-4.0" ]
null
null
null
articles/azure-resource-manager/resource-group-template-deploy-rest.md
CatchRetry/azure-docs.fr-fr
1ccd071caa483cc19d4d9b8c1c59104b1a7e6438
[ "CC-BY-4.0" ]
null
null
null
articles/azure-resource-manager/resource-group-template-deploy-rest.md
CatchRetry/azure-docs.fr-fr
1ccd071caa483cc19d4d9b8c1c59104b1a7e6438
[ "CC-BY-4.0" ]
null
null
null
---
title: Deploy resources with template and REST API | Microsoft Docs
description: Use Azure Resource Manager and the Resource Manager REST API to deploy resources to Azure. The resources are defined in a Resource Manager template.
services: azure-resource-manager
documentationcenter: na
author: tfitzmac
ms.assetid: 1d8fbd4c-78b0-425b-ba76-f2b7fd260b45
ms.service: azure-resource-manager
ms.devlang: na
ms.topic: conceptual
ms.tgt_pltfrm: na
ms.workload: na
ms.date: 10/26/2018
ms.author: tomfitz
ms.openlocfilehash: 058d6d398f6bb54e8569e727f118a325c338049d
ms.sourcegitcommit: 48592dd2827c6f6f05455c56e8f600882adb80dc
ms.translationtype: HT
ms.contentlocale: fr-FR
ms.lasthandoff: 10/26/2018
ms.locfileid: "50154740"
---
# <a name="deploy-resources-with-resource-manager-templates-and-resource-manager-rest-api"></a>Deploy resources with Resource Manager templates and the Resource Manager REST API

This article explains how to use the Resource Manager REST API with Resource Manager templates to deploy your resources to Azure.

> [!TIP]
> For help with debugging an error during deployment, see:
>
> * [View deployment operations](resource-manager-deployment-operations.md) to learn how to retrieve information that will help you troubleshoot your error
> * [Resolve common errors when deploying resources to Azure with Azure Resource Manager](resource-manager-common-deployment-errors.md) to learn how to resolve common deployment errors
>

You can include your template in the request body or link to a file. If you use a file, it can be a local file or an external file available through a URI. When your template is in a storage account, you can restrict access to the template and provide a shared access signature (SAS) token during deployment.

## <a name="deploy-with-the-rest-api"></a>Deploy with the REST API

1. Set [common headers and parameters](/rest/api/azure/), including authentication tokens.

1. If you don't have a resource group, create one. Provide your subscription ID, the name of the resource group, and the location your solution needs. For more information, see [Create a resource group](/rest/api/resources/resourcegroups/createorupdate).

   ```HTTP
   PUT https://management.azure.com/subscriptions/<YourSubscriptionId>/resourcegroups/<YourResourceGroupName>?api-version=2018-05-01
   ```

   With a request body like the following:

   ```json
   {
     "location": "West US",
     "tags": {
       "tagname1": "tagvalue1"
     }
   }
   ```

1. Validate your deployment before it runs by executing the [Validate a template deployment](/rest/api/resources/deployments/validate) operation. When testing the deployment, provide the parameters exactly as you will when running the deployment (see the next step).

1. Create a deployment. Provide your subscription ID, the name of the resource group, the name of the deployment, and a link to your template. For more information about the template file, see [Parameter file](#parameter-file). For more information about the REST API for creating a resource group, see [Create a template deployment](/rest/api/resources/deployments/createorupdate). Note that **Mode** is set to **Incremental**. To run a complete deployment, set **Mode** to **Complete**. Be careful when using complete mode, as you can inadvertently delete resources that aren't in your template.

   ```HTTP
   PUT https://management.azure.com/subscriptions/<YourSubscriptionId>/resourcegroups/<YourResourceGroupName>/providers/Microsoft.Resources/deployments/<YourDeploymentName>?api-version=2018-05-01
   ```

   With a request body like the following:

   ```json
   {
     "properties": {
       "templateLink": {
         "uri": "http://mystorageaccount.blob.core.windows.net/templates/template.json",
         "contentVersion": "1.0.0.0"
       },
       "mode": "Incremental",
       "parametersLink": {
         "uri": "http://mystorageaccount.blob.core.windows.net/templates/parameters.json",
         "contentVersion": "1.0.0.0"
       }
     }
   }
   ```

   If you want to log response content and/or request content, include **debugSetting** in the request.

   ```json
   "debugSetting": {
     "detailLevel": "requestContent, responseContent"
   }
   ```

   You can set up your storage account to use a shared access signature (SAS) token. For more information, see [Delegating access with a shared access signature](https://docs.microsoft.com/rest/api/storageservices/delegating-access-with-a-shared-access-signature).

1. Instead of linking to files for the template and parameters, you can include them in the request body.

   ```json
   {
     "properties": {
       "mode": "Incremental",
       "template": {
         "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
         "contentVersion": "1.0.0.0",
         "parameters": {
           "storageAccountType": {
             "type": "string",
             "defaultValue": "Standard_LRS",
             "allowedValues": [
               "Standard_LRS",
               "Standard_GRS",
               "Standard_ZRS",
               "Premium_LRS"
             ],
             "metadata": {
               "description": "Storage Account type"
             }
           },
           "location": {
             "type": "string",
             "defaultValue": "[resourceGroup().location]",
             "metadata": {
               "description": "Location for all resources."
             }
           }
         },
         "variables": {
           "storageAccountName": "[concat(uniquestring(resourceGroup().id), 'standardsa')]"
         },
         "resources": [
           {
             "type": "Microsoft.Storage/storageAccounts",
             "name": "[variables('storageAccountName')]",
             "apiVersion": "2018-02-01",
             "location": "[parameters('location')]",
             "sku": {
               "name": "[parameters('storageAccountType')]"
             },
             "kind": "StorageV2",
             "properties": {}
           }
         ],
         "outputs": {
           "storageAccountName": {
             "type": "string",
             "value": "[variables('storageAccountName')]"
           }
         }
       },
       "parameters": {
         "location": {
           "value": "eastus2"
         }
       }
     }
   }
   ```

5. Get the status of the template deployment. For more information, see [Get information about a template deployment](/rest/api/resources/deployments/get).

   ```HTTP
   GET https://management.azure.com/subscriptions/<YourSubscriptionId>/resourcegroups/<YourResourceGroupName>/providers/Microsoft.Resources/deployments/<YourDeploymentName>?api-version=2018-05-01
   ```

## <a name="redeploy-when-deployment-fails"></a>Redeploy when a deployment fails

For deployments that fail, you can specify that an earlier deployment from your deployment history is automatically redeployed. To use this option, your deployments must have unique names so they can be identified in the history. If the names aren't unique, the currently failing deployment might overwrite the previously successful deployment in the history. You can only use this option with root-level deployments. Deployments from a nested template aren't available for redeployment.

To redeploy the last successful deployment if the current deployment fails, use:

```json
"onErrorDeployment": {
  "type": "LastSuccessful",
},
```

To redeploy a specific deployment if the current deployment fails, use:

```json
"onErrorDeployment": {
  "type": "SpecificDeployment",
  "deploymentName": "<deploymentname>"
}
```

The specified deployment must have succeeded.

## <a name="parameter-file"></a>Parameter file

If you use a parameter file to pass values during deployment, you need to create a JSON file with a format similar to the following example:

```json
{
  "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
  "contentVersion": "1.0.0.0",
  "parameters": {
    "webSiteName": {
      "value": "ExampleSite"
    },
    "webSiteHostingPlanName": {
      "value": "DefaultPlan"
    },
    "webSiteLocation": {
      "value": "West US"
    },
    "adminPassword": {
      "reference": {
        "keyVault": {
          "id": "/subscriptions/{guid}/resourceGroups/{group-name}/providers/Microsoft.KeyVault/vaults/{vault-name}"
        },
        "secretName": "sqlAdminPassword"
      }
    }
  }
}
```

The parameter file can't be larger than 64 KB.

To provide a sensitive value for a parameter (such as a password), add the value to a key vault. Retrieve the key vault during deployment as shown in the previous example. For more information, see [Pass secure values during deployment](resource-manager-keyvault-parameter.md).

## <a name="next-steps"></a>Next steps

* To specify how to handle resources that exist in the resource group but aren't defined in the template, see [Azure Resource Manager deployment modes](deployment-modes.md).
* To learn about handling asynchronous REST operations, see [Track asynchronous Azure operations](resource-manager-async-operations.md).
* To see an example of deploying resources through the .NET client library, see [Deploy resources with .NET libraries and a template](../virtual-machines/windows/csharp-template.md?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
* To define parameters in the template, see [Authoring templates](resource-group-authoring-templates.md#parameters).
* For guidance on how enterprises can use Resource Manager to effectively manage subscriptions, see [Azure enterprise scaffold - prescriptive subscription governance](/azure/architecture/cloud-adoption-guide/subscription-governance).
48.321429
735
0.705377
fra_Latn
0.940636
119c915aa9396924d84107f3ebf3bb398e4bf8fc
20,759
md
Markdown
docs/vsto/walkthrough-creating-a-template-by-using-content-controls.md
BrainCraze/visualstudio-docs.de-de
3758c943d5f4eacbdc0d975cb114f287018463dd
[ "CC-BY-4.0", "MIT" ]
null
null
null
docs/vsto/walkthrough-creating-a-template-by-using-content-controls.md
BrainCraze/visualstudio-docs.de-de
3758c943d5f4eacbdc0d975cb114f287018463dd
[ "CC-BY-4.0", "MIT" ]
null
null
null
docs/vsto/walkthrough-creating-a-template-by-using-content-controls.md
BrainCraze/visualstudio-docs.de-de
3758c943d5f4eacbdc0d975cb114f287018463dd
[ "CC-BY-4.0", "MIT" ]
null
null
null
---
title: 'Walkthrough: Creating a template by using content controls | Microsoft Docs'
ms.custom: ''
ms.date: 02/02/2017
ms.technology:
- office-development
ms.topic: conceptual
dev_langs:
- VB
- CSharp
helpviewer_keywords:
- building blocks [Office development in Visual Studio]
- Word [Office development in Visual Studio], creating documents
- content controls [Office development in Visual Studio], adding to documents
author: TerryGLee
ms.author: tglee
manager: douge
ms.workload:
- office
ms.openlocfilehash: 8c7f5026d4cbe8b7c38b8163ce00d893e1e406f2
ms.sourcegitcommit: 6a9d5bd75e50947659fd6c837111a6a547884e2a
ms.translationtype: MT
ms.contentlocale: de-DE
ms.lasthandoff: 04/16/2018
---
# <a name="walkthrough-creating-a-template-by-using-content-controls"></a>Walkthrough: Creating a template by using content controls

This walkthrough demonstrates how to create a document-level customization that uses content controls to create structured and reusable content in a Microsoft Office Word template.

[!INCLUDE[appliesto_wdalldoc](../vsto/includes/appliesto-wdalldoc-md.md)]

Word lets you create a collection of reusable document parts called *building blocks*. This walkthrough demonstrates how to create two tables as building blocks. Each table contains several content controls that can hold different types of content, such as plain text or dates. One of the tables contains information about an employee, and the other contains customer feedback. After you create a document from the template, you can add either of the tables to it by using several <xref:Microsoft.Office.Tools.Word.BuildingBlockGalleryContentControl> objects, which display the building blocks that are available in the template.

This walkthrough illustrates the following tasks:

- Creating tables that contain content controls in a Word template at design time

- Populating a combo box content control and a drop-down list content control programmatically

- Preventing users from editing a particular table

- Adding tables to a template's building block collection

- Creating a content control that displays the available building blocks in the template

[!INCLUDE[note_settings_general](../sharepoint/includes/note-settings-general-md.md)]

## <a name="prerequisites"></a>Prerequisites

You need the following components to complete this walkthrough:

- [!INCLUDE[vsto_vsprereq](../vsto/includes/vsto-vsprereq-md.md)]

- Microsoft Word.

## <a name="creating-a-new-word-template-project"></a>Creating a new Word template project

Create a Word template so that users can easily create their own copies of it.

#### <a name="to-create-a-new-word-template-project"></a>To create a new Word template project

1. Create a Word template project with the name **MyBuildingBlockTemplate**. In the wizard, create a new document in the solution. For more information, see [How to: Create Office Projects in Visual Studio](../vsto/how-to-create-office-projects-in-visual-studio.md).

   [!INCLUDE[vsprvs](../sharepoint/includes/vsprvs-md.md)] opens the new Word template in the designer and adds the **MyBuildingBlockTemplate** project to **Solution Explorer**.

## <a name="creating-the-employee-table"></a>Creating the employee table

Create a table that contains four different types of content controls in which the user can enter information about an employee.

#### <a name="to-create-the-employee-table"></a>To create the employee table

1. In the Word template that is hosted in the [!INCLUDE[vsprvs](../sharepoint/includes/vsprvs-md.md)] designer, click the **Insert** tab on the ribbon.

2. In the **Tables** group, click **Table**, and insert a table with 2 columns and 4 rows.

3. Type text in the first column so that it resembles the following column:

   ||
   |-|
   |**Employee Name**|
   |**Hire Date**|
   |**Title**|
   |**Picture**|

4. Click in the first cell in the second column (next to **Employee Name**).

5. On the ribbon, click the **Developer** tab.

   > [!NOTE]
   > If the **Developer** tab isn't visible, you must show it first. For more information, see [How to: Show the Developer tab on the ribbon](../vsto/how-to-show-the-developer-tab-on-the-ribbon.md).

6. In the **Controls** group, click the **Text** button ![PlainTextContentControl](../vsto/media/plaintextcontrol.gif "PlainTextContentControl") to add a <xref:Microsoft.Office.Tools.Word.PlainTextContentControl> to the first cell.

7. Click the second cell in the second column (next to **Hire Date**).

8. In the **Controls** group, click the **Date Picker** button ![DatePickerContentControl](../vsto/media/datepicker.gif "DatePickerContentControl") to add a <xref:Microsoft.Office.Tools.Word.DatePickerContentControl> to the second cell.

9. Click the third cell in the second column (next to **Title**).

10. In the **Controls** group, click the **Combo Box** button ![ComboBoxContentControl](../vsto/media/combobox.gif "ComboBoxContentControl") to add a <xref:Microsoft.Office.Tools.Word.ComboBoxContentControl> to the third cell.

11. Click the last cell in the second column (next to **Picture**).

12. In the **Controls** group, click the **Picture Content Control** button ![PictureContentControl](../vsto/media/pictcontentcontrol.gif "PictureContentControl") to add a <xref:Microsoft.Office.Tools.Word.PictureContentControl> to the last cell.

## <a name="creating-the-customer-feedback-table"></a>Creating the customer feedback table

Create a table that contains three different types of content controls in which the user can enter customer feedback information.

#### <a name="to-create-the-customer-feedback-table"></a>To create the customer feedback table

1. In the Word template, click in the line after the employee table you added earlier, and press Enter to add a new paragraph.

2. On the ribbon, click the **Insert** tab.

3. In the **Tables** group, click **Table**, and insert a table with 2 columns and 3 rows.

4. Type text in the first column so that it resembles the following column:

   ||
   |-|
   |**Customer Name**|
   |**Satisfaction Rating**|
   |**Comments**|

5. Click in the first cell of the second column (next to **Customer Name**).

6. On the ribbon, click the **Developer** tab.

7. In the **Controls** group, click the **Text** button ![PlainTextContentControl](../vsto/media/plaintextcontrol.gif "PlainTextContentControl") to add a <xref:Microsoft.Office.Tools.Word.PlainTextContentControl> to the first cell.

8. Click in the second cell of the second column (next to **Satisfaction Rating**).

9. In the **Controls** group, click the **Drop-Down List** button ![DropDownListContentControl](../vsto/media/dropdownlist.gif "DropDownListContentControl") to add a <xref:Microsoft.Office.Tools.Word.DropDownListContentControl> to the second cell.

10. Click in the last cell of the second column (next to **Comments**).

11. In the **Controls** group, click the **Rich Text** button ![RichTextContentControl](../vsto/media/richtextcontrol.gif "RichTextContentControl") to add a <xref:Microsoft.Office.Tools.Word.RichTextContentControl> to the last cell.

## <a name="populating-the-combo-box-and-drop-down-list-programmatically"></a>Populating the combo box and drop-down list programmatically

You can initialize content controls at design time by using the **Properties** window in [!INCLUDE[vsprvs](../sharepoint/includes/vsprvs-md.md)]. You can also initialize them at run time, which lets you set their initial states dynamically. In this walkthrough, you use code to populate the entries in the <xref:Microsoft.Office.Tools.Word.ComboBoxContentControl> and <xref:Microsoft.Office.Tools.Word.DropDownListContentControl> at run time, so that you can see how these objects work.

#### <a name="to-modify-the-ui-of-the-content-controls-programmatically"></a>To modify the UI of the content controls programmatically

1. In **Solution Explorer**, right-click **ThisDocument.cs** or **ThisDocument.vb**, and then click **View Code**.

2. Add the following code to the `ThisDocument` class. This code declares several objects that you will use later in this walkthrough.

   [!code-vb[Trin_ContentControlTemplateWalkthrough#1](../vsto/codesnippet/VisualBasic/ContentControlTemplateWalkthrough/ThisDocument.vb#1)]
   [!code-csharp[Trin_ContentControlTemplateWalkthrough#1](../vsto/codesnippet/CSharp/ContentControlTemplateWalkthrough/ThisDocument.cs#1)]

3. Add the following code to the `ThisDocument_Startup` method of the `ThisDocument` class. This code adds entries to the <xref:Microsoft.Office.Tools.Word.ComboBoxContentControl> and <xref:Microsoft.Office.Tools.Word.DropDownListContentControl> in the tables and sets the placeholder text that is displayed in each control before the user edits it.

   [!code-vb[Trin_ContentControlTemplateWalkthrough#2](../vsto/codesnippet/VisualBasic/ContentControlTemplateWalkthrough/ThisDocument.vb#2)]
   [!code-csharp[Trin_ContentControlTemplateWalkthrough#2](../vsto/codesnippet/CSharp/ContentControlTemplateWalkthrough/ThisDocument.cs#2)]

## <a name="preventing-users-from-editing-the-employee-table"></a>Preventing users from editing the employee table

Use the previously declared <xref:Microsoft.Office.Tools.Word.GroupContentControl> object to protect the employee table. After you protect the table, users can still edit the content controls in the table. However, they can't edit text in the first column or modify the table in other ways, such as adding or deleting rows and columns. For more information about how to use a <xref:Microsoft.Office.Tools.Word.GroupContentControl> to protect a part of a document, see [Content controls](../vsto/content-controls.md).

#### <a name="to-prevent-users-from-editing-the-employee-table"></a>To prevent users from editing the employee table

1. Add the following code to the `ThisDocument_Startup` method of the `ThisDocument` class, after the code you added in the previous step. This code prevents users from editing the employee table by putting the table in the previously declared <xref:Microsoft.Office.Tools.Word.GroupContentControl> object.

   [!code-vb[Trin_ContentControlTemplateWalkthrough#3](../vsto/codesnippet/VisualBasic/ContentControlTemplateWalkthrough/ThisDocument.vb#3)]
   [!code-csharp[Trin_ContentControlTemplateWalkthrough#3](../vsto/codesnippet/CSharp/ContentControlTemplateWalkthrough/ThisDocument.cs#3)]

## <a name="adding-the-tables-to-the-building-block-collection"></a>Adding the tables to the building block collection

Add the tables to a collection of document building blocks in the template, so that users can insert the tables you created into the document. For more information about document building blocks, see [Content controls](../vsto/content-controls.md).

#### <a name="to-add-the-tables-to-the-building-blocks-in-the-template"></a>To add the tables to the building blocks in the template

1. Add the following code to the `ThisDocument_Startup` method of the `ThisDocument` class, after the code you added in the previous step. This code adds new building blocks that contain the tables to the Microsoft.Office.Interop.Word.BuildingBlockEntries collection, which contains all of the reusable building blocks in the template. The new building blocks are defined in a new category named **Employee and Customer Information** and are assigned the building block type wdTypeCustom1.

   [!code-vb[Trin_ContentControlTemplateWalkthrough#4](../vsto/codesnippet/VisualBasic/ContentControlTemplateWalkthrough/ThisDocument.vb#4)]
   [!code-csharp[Trin_ContentControlTemplateWalkthrough#4](../vsto/codesnippet/CSharp/ContentControlTemplateWalkthrough/ThisDocument.cs#4)]

2. Add the following code to the `ThisDocument_Startup` method of the `ThisDocument` class, after the code you added in the previous step. This code deletes the tables from the template. The tables are no longer needed because you added them to the gallery of reusable building blocks in the template. The code first puts the document into design mode so that the protected employee table can be deleted.

   [!code-vb[Trin_ContentControlTemplateWalkthrough#5](../vsto/codesnippet/VisualBasic/ContentControlTemplateWalkthrough/ThisDocument.vb#5)]
   [!code-csharp[Trin_ContentControlTemplateWalkthrough#5](../vsto/codesnippet/CSharp/ContentControlTemplateWalkthrough/ThisDocument.cs#5)]

## <a name="creating-a-content-control-that-displays-the-building-blocks"></a>Creating a content control that displays the building blocks

Create a content control that provides access to the building blocks (that is, the tables) you created earlier. Users can click this control to add the tables to the document.

#### <a name="to-create-a-content-control-that-displays-the-building-blocks"></a>To create a content control that displays the building blocks

1. Add the following code to the `ThisDocument_Startup` method of the `ThisDocument` class, after the code you added in the previous step. This code initializes the previously declared <xref:Microsoft.Office.Tools.Word.BuildingBlockGalleryContentControl> object. The <xref:Microsoft.Office.Tools.Word.BuildingBlockGalleryContentControl> displays all of the building blocks that are defined in the category **Employee and Customer Information** and that are assigned the building block type wdTypeCustom1.

   [!code-vb[Trin_ContentControlTemplateWalkthrough#6](../vsto/codesnippet/VisualBasic/ContentControlTemplateWalkthrough/ThisDocument.vb#6)]
   [!code-csharp[Trin_ContentControlTemplateWalkthrough#6](../vsto/codesnippet/CSharp/ContentControlTemplateWalkthrough/ThisDocument.cs#6)]

## <a name="testing-the-project"></a>Testing the project

Users can click the building block gallery controls in the document to insert the employee table or the customer feedback table. Users can type or select responses in the content controls in both tables. Users can modify other parts of the customer feedback table, but they shouldn't be able to modify other parts of the employee table.

#### <a name="to-test-the-employee-table"></a>To test the employee table

1. Press F5 to run the project.

2. Click **Choose your first building block** to display the first content control.

3. Click the drop-down arrow next to the **Custom Gallery 1** heading in the control, and select **Employee table**.

4. Click the cell to the right of the **Employee Name** cell and type a name. Verify that you can only add text to this cell. The <xref:Microsoft.Office.Tools.Word.PlainTextContentControl> allows users to add only text, not other types of content such as graphics or tables.

5. Click the cell to the right of the **Hire Date** cell, and select a date in the date picker.

6. Click the cell to the right of the **Title** cell, and select one of the job titles in the combo box. Optionally, type a job title that isn't in the list. This is possible because the <xref:Microsoft.Office.Tools.Word.ComboBoxContentControl> lets users select from a list of entries or type their own entry.

7. Click the icon in the cell to the right of the **Picture** cell, and browse to an image to display it.

8. Try to add rows or columns to the table and delete rows and columns from the table. Verify that you can't modify the table. The <xref:Microsoft.Office.Tools.Word.GroupContentControl> prevents you from making changes.

#### <a name="to-test-the-customer-feedback-table"></a>To test the customer feedback table

1. Click **Choose your second building block** to display the second content control.

2. Click the drop-down arrow next to the **Custom Gallery 1** heading in the control, and select **Customer table**.

3. Click the cell to the right of the **Customer Name** cell and type a name.

4. Click the cell to the right of the **Satisfaction Rating** cell, and select one of the available options. Verify that you can't type your own entry. The <xref:Microsoft.Office.Tools.Word.DropDownListContentControl> only lets users select from a list of entries.

5. Click the cell to the right of the **Comments** cell and type some comments. Optionally, add content other than text, such as graphics or an embedded table. This is possible because the <xref:Microsoft.Office.Tools.Word.RichTextContentControl> lets users add content other than text.

6. Verify that you can add rows or columns to the table and delete rows and columns from the table. This is possible because you didn't protect the table by putting it in a <xref:Microsoft.Office.Tools.Word.GroupContentControl>.

7. Close the template.

## <a name="next-steps"></a>Next steps

You can learn more about how to use content controls from this topic:

- Binding content controls to XML elements (also called custom XML parts) that are embedded in a document. For more information, see [Walkthrough: Binding content controls to custom XML parts](../vsto/walkthrough-binding-content-controls-to-custom-xml-parts.md).

## <a name="see-also"></a>See also

[Automating Word by using extended objects](../vsto/automating-word-by-using-extended-objects.md)
[Content controls](../vsto/content-controls.md)
[How to: Add content controls to Word documents](../vsto/how-to-add-content-controls-to-word-documents.md)
[How to: Protect parts of documents by using content controls](../vsto/how-to-protect-parts-of-documents-by-using-content-controls.md)
[Host items and host controls overview](../vsto/host-items-and-host-controls-overview.md)
[Programmatic limitations of host items and host controls](../vsto/programmatic-limitations-of-host-items-and-host-controls.md)
[Adding controls to Office documents at run time](../vsto/adding-controls-to-office-documents-at-run-time.md)
83.705645
625
0.785972
deu_Latn
0.986185
119ca26fbd1c3b1fc97e229e65ddac3f21455263
221
md
Markdown
README.md
WIZARDISHUNGRY/privsep
93ec07a4ea9d9581034ef63d4b2a069dafc67dd0
[ "MIT" ]
null
null
null
README.md
WIZARDISHUNGRY/privsep
93ec07a4ea9d9581034ef63d4b2a069dafc67dd0
[ "MIT" ]
null
null
null
README.md
WIZARDISHUNGRY/privsep
93ec07a4ea9d9581034ef63d4b2a069dafc67dd0
[ "MIT" ]
null
null
null
# Passing an anonymous unix socket connection over inherited file descriptors

My use case for needing to do this is an unstable cgo package :)

See also https://jayconrod.com/posts/123/internals-of-go-s-new-fuzzing-system
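A minimal sketch of the pattern in Go, not this module's actual API: the flag name, messages, and fd bookkeeping are illustrative. The parent creates an anonymous socketpair, hands one end to a re-executed copy of itself via `exec.Cmd.ExtraFiles` (which the child sees as fd 3), and both sides recover a `net.Conn` with `net.FileConn`.

```go
// Illustrative sketch only; unix-only (uses syscall.Socketpair).
package main

import (
	"fmt"
	"log"
	"net"
	"os"
	"os/exec"
	"syscall"
)

// workerConn recovers the inherited descriptor (fd 3, the first
// ExtraFiles entry) as a net.Conn inside the child process.
func workerConn() (net.Conn, error) {
	f := os.NewFile(3, "inherited-conn")
	defer f.Close() // net.FileConn dups the fd, so the original can be closed
	return net.FileConn(f)
}

// spawnWorker creates an anonymous unix socketpair, keeps one end,
// and passes the other to a re-executed copy of this binary.
func spawnWorker() (net.Conn, *exec.Cmd, error) {
	fds, err := syscall.Socketpair(syscall.AF_UNIX, syscall.SOCK_STREAM, 0)
	if err != nil {
		return nil, nil, err
	}
	parentEnd := os.NewFile(uintptr(fds[0]), "parent-end")
	childEnd := os.NewFile(uintptr(fds[1]), "child-end")
	defer parentEnd.Close() // safe: net.FileConn below dups it
	defer childEnd.Close()  // safe: cmd.Start() dups it into the child

	conn, err := net.FileConn(parentEnd)
	if err != nil {
		return nil, nil, err
	}

	cmd := exec.Command(os.Args[0], "-worker")
	cmd.ExtraFiles = []*os.File{childEnd} // becomes fd 3 in the child
	cmd.Stderr = os.Stderr
	if err := cmd.Start(); err != nil {
		conn.Close()
		return nil, nil, err
	}
	return conn, cmd, nil
}

func main() {
	if len(os.Args) > 1 && os.Args[1] == "-worker" {
		conn, err := workerConn()
		if err != nil {
			log.Fatal(err)
		}
		fmt.Fprintln(conn, "hello from the worker")
		conn.Close()
		return
	}

	conn, cmd, err := spawnWorker()
	if err != nil {
		log.Fatal(err)
	}
	buf := make([]byte, 64)
	n, _ := conn.Read(buf)
	fmt.Printf("parent received: %s", buf[:n])
	conn.Close()
	cmd.Wait()
}
```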
44.2
77
0.79638
eng_Latn
0.986799
119d7c62c394555a1825db4dfeaa9b8fc87c477f
11,539
md
Markdown
docs/Api/PoolApi.md
brentmullen/fastly-php
5e509563a06984be91f98e03717c206800f61002
[ "MIT" ]
null
null
null
docs/Api/PoolApi.md
brentmullen/fastly-php
5e509563a06984be91f98e03717c206800f61002
[ "MIT" ]
null
null
null
docs/Api/PoolApi.md
brentmullen/fastly-php
5e509563a06984be91f98e03717c206800f61002
[ "MIT" ]
null
null
null
# Fastly\Api\PoolApi

```php
$apiInstance = new Fastly\Api\PoolApi(
    // If you want to use a custom HTTP client, pass a client that implements `GuzzleHttp\ClientInterface`.
    // This is optional; `GuzzleHttp\Client` will be used as the default.
    new GuzzleHttp\Client(),
    $config
);
```

## Methods

Method | Fastly API endpoint | Description
------------- | ------------- | -------------
[**createServerPool()**](PoolApi.md#createServerPool) | **POST** /service/{service_id}/version/{version_id}/pool | Create a server pool
[**deleteServerPool()**](PoolApi.md#deleteServerPool) | **DELETE** /service/{service_id}/version/{version_id}/pool/{pool_name} | Delete a server pool
[**getServerPool()**](PoolApi.md#getServerPool) | **GET** /service/{service_id}/version/{version_id}/pool/{pool_name} | Get a server pool
[**listServerPools()**](PoolApi.md#listServerPools) | **GET** /service/{service_id}/version/{version_id}/pool | List server pools
[**updateServerPool()**](PoolApi.md#updateServerPool) | **PUT** /service/{service_id}/version/{version_id}/pool/{pool_name} | Update a server pool

## `createServerPool()`

```php
createServerPool($options): \Fastly\Model\PoolResponse // Create a server pool
```

Creates a pool for a particular service and version.

### Example

```php
try {
    $result = $apiInstance->createServerPool($options);
} catch (Exception $e) {
    echo 'Exception when calling PoolApi->createServerPool: ', $e->getMessage(), PHP_EOL;
}
```

### Options

Note: the input parameter is an associative array with the keys listed below.

Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
**service_id** | **string** | Alphanumeric string identifying the service. |
**version_id** | **int** | Integer identifying a service version. |
**tls_ca_cert** | **string** | A secure certificate to authenticate a server with. Must be in PEM format. | [optional] [defaults to 'null']
**tls_client_cert** | **string** | The client certificate used to make authenticated requests. Must be in PEM format. | [optional] [defaults to 'null']
**tls_client_key** | **string** | The client private key used to make authenticated requests. Must be in PEM format. | [optional] [defaults to 'null']
**tls_cert_hostname** | **string** | The hostname used to verify a server's certificate. It can either be the Common Name (CN) or a Subject Alternative Name (SAN). | [optional] [defaults to 'null']
**use_tls** | **int** | Whether to use TLS. | [optional] [one of: 0, 1]
**name** | **string** | Name for the Pool. | [optional]
**shield** | **string** | Selected POP to serve as a shield for the servers. Defaults to `null` meaning no origin shielding if not set. Refer to the [POPs API endpoint](/reference/api/utils/pops/) to get a list of available POPs used for shielding. | [optional] [defaults to 'null']
**request_condition** | **string** | Condition which, if met, will select this configuration during a request. Optional. | [optional]
**max_conn_default** | **int** | Maximum number of connections. Optional. | [optional] [defaults to 200]
**connect_timeout** | **int** | How long to wait for a timeout in milliseconds. Optional. | [optional]
**first_byte_timeout** | **int** | How long to wait for the first byte in milliseconds. Optional. | [optional]
**quorum** | **int** | Percentage of capacity (`0-100`) that needs to be operationally available for a pool to be considered up. | [optional] [defaults to 75]
**tls_ciphers** | **string** | List of OpenSSL ciphers (see the [openssl.org manpages](https://www.openssl.org/docs/man1.1.1/man1/ciphers.html) for details). Optional. | [optional]
**tls_sni_hostname** | **string** | SNI hostname. Optional. | [optional]
**tls_check_cert** | **int** | Be strict on checking TLS certs. Optional. | [optional]
**min_tls_version** | **int** | Minimum allowed TLS version on connections to this server. Optional. | [optional]
**max_tls_version** | **int** | Maximum allowed TLS version on connections to this server. Optional. | [optional]
**healthcheck** | **string** | Name of the healthcheck to use with this pool. Can be empty and could be reused across multiple backend and pools. | [optional]
**comment** | **string** | A freeform descriptive note. | [optional]
**type** | **string** | What type of load balance group to use. | [optional] [one of: 'random', 'hash', 'client']
**override_host** | **string** | The hostname to [override the Host header](https://docs.fastly.com/en/guides/specifying-an-override-host). Defaults to `null` meaning no override of the Host header will occur. This setting can also be added to a Server definition. If the field is set on a Server definition it will override the Pool setting. | [optional] [defaults to 'null']

### Return type

[**\Fastly\Model\PoolResponse**](../Model/PoolResponse.md)

[[Back to top]](#) [[Back to API list]](../../README.md#endpoints) [[Back to README]](../../README.md)

## `deleteServerPool()`

```php
deleteServerPool($options): object // Delete a server pool
```

Deletes a specific pool for a particular service and version.

### Example

```php
try {
    $result = $apiInstance->deleteServerPool($options);
} catch (Exception $e) {
    echo 'Exception when calling PoolApi->deleteServerPool: ', $e->getMessage(), PHP_EOL;
}
```

### Options

Note: the input parameter is an associative array with the keys listed below.

Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
**service_id** | **string** | Alphanumeric string identifying the service. |
**version_id** | **int** | Integer identifying a service version. |
**pool_name** | **string** | Name for the Pool. |

### Return type

**object**

[[Back to top]](#) [[Back to API list]](../../README.md#endpoints) [[Back to README]](../../README.md)

## `getServerPool()`

```php
getServerPool($options): \Fastly\Model\PoolResponse // Get a server pool
```

Gets a single pool for a particular service and version.

### Example

```php
try {
    $result = $apiInstance->getServerPool($options);
} catch (Exception $e) {
    echo 'Exception when calling PoolApi->getServerPool: ', $e->getMessage(), PHP_EOL;
}
```

### Options

Note: the input parameter is an associative array with the keys listed below.

Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
**service_id** | **string** | Alphanumeric string identifying the service. |
**version_id** | **int** | Integer identifying a service version. |
**pool_name** | **string** | Name for the Pool. |

### Return type

[**\Fastly\Model\PoolResponse**](../Model/PoolResponse.md)

[[Back to top]](#) [[Back to API list]](../../README.md#endpoints) [[Back to README]](../../README.md)

## `listServerPools()`

```php
listServerPools($options): \Fastly\Model\PoolResponse[] // List server pools
```

Lists all pools for a particular service and version.

### Example

```php
try {
    $result = $apiInstance->listServerPools($options);
} catch (Exception $e) {
    echo 'Exception when calling PoolApi->listServerPools: ', $e->getMessage(), PHP_EOL;
}
```

### Options

Note: the input parameter is an associative array with the keys listed below.

Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
**service_id** | **string** | Alphanumeric string identifying the service. |
**version_id** | **int** | Integer identifying a service version. |

### Return type

[**\Fastly\Model\PoolResponse[]**](../Model/PoolResponse.md)

[[Back to top]](#) [[Back to API list]](../../README.md#endpoints) [[Back to README]](../../README.md)

## `updateServerPool()`

```php
updateServerPool($options): \Fastly\Model\PoolResponse // Update a server pool
```

Updates a specific pool for a particular service and version.

### Example

```php
try {
    $result = $apiInstance->updateServerPool($options);
} catch (Exception $e) {
    echo 'Exception when calling PoolApi->updateServerPool: ', $e->getMessage(), PHP_EOL;
}
```

### Options

Note: the input parameter is an associative array with the keys listed below.

Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
**service_id** | **string** | Alphanumeric string identifying the service. |
**version_id** | **int** | Integer identifying a service version. |
**pool_name** | **string** | Name for the Pool. |
**tls_ca_cert** | **string** | A secure certificate to authenticate a server with. Must be in PEM format. | [optional] [defaults to 'null']
**tls_client_cert** | **string** | The client certificate used to make authenticated requests. Must be in PEM format. | [optional] [defaults to 'null']
**tls_client_key** | **string** | The client private key used to make authenticated requests. Must be in PEM format. | [optional] [defaults to 'null']
**tls_cert_hostname** | **string** | The hostname used to verify a server's certificate. It can either be the Common Name (CN) or a Subject Alternative Name (SAN). | [optional] [defaults to 'null']
**use_tls** | **int** | Whether to use TLS. | [optional] [one of: 0, 1]
**name** | **string** | Name for the Pool. | [optional]
**shield** | **string** | Selected POP to serve as a shield for the servers. Defaults to `null` meaning no origin shielding if not set. Refer to the [POPs API endpoint](/reference/api/utils/pops/) to get a list of available POPs used for shielding. | [optional] [defaults to 'null']
**request_condition** | **string** | Condition which, if met, will select this configuration during a request. Optional. | [optional]
**max_conn_default** | **int** | Maximum number of connections. Optional. | [optional] [defaults to 200]
**connect_timeout** | **int** | How long to wait for a timeout in milliseconds. Optional. | [optional]
**first_byte_timeout** | **int** | How long to wait for the first byte in milliseconds. Optional. | [optional]
**quorum** | **int** | Percentage of capacity (`0-100`) that needs to be operationally available for a pool to be considered up. | [optional] [defaults to 75]
**tls_ciphers** | **string** | List of OpenSSL ciphers (see the [openssl.org manpages](https://www.openssl.org/docs/man1.1.1/man1/ciphers.html) for details). Optional. | [optional]
**tls_sni_hostname** | **string** | SNI hostname. Optional. | [optional]
**tls_check_cert** | **int** | Be strict on checking TLS certs. Optional. | [optional]
**min_tls_version** | **int** | Minimum allowed TLS version on connections to this server. Optional. | [optional]
**max_tls_version** | **int** | Maximum allowed TLS version on connections to this server. Optional. | [optional]
**healthcheck** | **string** | Name of the healthcheck to use with this pool. Can be empty and could be reused across multiple backend and pools. | [optional]
**comment** | **string** | A freeform descriptive note. | [optional]
**type** | **string** | What type of load balance group to use. | [optional] [one of: 'random', 'hash', 'client']
**override_host** | **string** | The hostname to [override the Host header](https://docs.fastly.com/en/guides/specifying-an-override-host). Defaults to `null` meaning no override of the Host header will occur. This setting can also be added to a Server definition. If the field is set on a Server definition it will override the Pool setting. | [optional] [defaults to 'null']

### Return type

[**\Fastly\Model\PoolResponse**](../Model/PoolResponse.md)

[[Back to top]](#) [[Back to API list]](../../README.md#endpoints) [[Back to README]](../../README.md)
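The method examples above assume an `$apiInstance` built from a `$config`. For completeness, an end-to-end call might look like the sketch below; the environment variable name and example service ID are placeholders, and the `Configuration::getDefaultConfiguration()->setApiToken()` helper and model getters are assumed from the generated-client pattern rather than taken from this page:

```php
<?php
// Illustrative sketch only: env var name, service ID, and helper names are assumptions.
require_once(__DIR__ . '/vendor/autoload.php');

$config = Fastly\Configuration::getDefaultConfiguration()
    ->setApiToken(getenv('FASTLY_API_TOKEN'));

$apiInstance = new Fastly\Api\PoolApi(new GuzzleHttp\Client(), $config);

try {
    $pools = $apiInstance->listServerPools([
        'service_id' => 'SU1Z0isxPaozGVKXdv0eY', // placeholder service ID
        'version_id' => 1,
    ]);
    foreach ($pools as $pool) {
        echo $pool->getName(), PHP_EOL; // generated models typically expose getters
    }
} catch (Exception $e) {
    echo 'Exception when calling PoolApi->listServerPools: ', $e->getMessage(), PHP_EOL;
}
```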
49.523605
376
0.676922
eng_Latn
0.815648
119dc702186521b31cf9df67f54205d03758f2d2
371
md
Markdown
README.md
WhiteTshirts/theatalk_frontend
95ef783aab4777d10beb5c57daa98b15f16d26ab
[ "MIT" ]
null
null
null
README.md
WhiteTshirts/theatalk_frontend
95ef783aab4777d10beb5c57daa98b15f16d26ab
[ "MIT" ]
17
2020-10-08T03:55:59.000Z
2021-01-27T14:49:15.000Z
README.md
WhiteTshirts/theatalk_frontend
95ef783aab4777d10beb5c57daa98b15f16d26ab
[ "MIT" ]
null
null
null
# TheaTalk

## Getting started

Build with `docker-compose build`.
Run the development environment with `docker-compose up`.

## Running tests and lint

Access the container with `docker-compose exec front bash`.
Run the tests and linter with `yarn test && yarn lint`.

## Inspecting the mock server's database

Access the container with `docker-compose exec json_server bash`.
Check the contents of the database with `cat db.json`.

## About the authors

Unless credited otherwise, everything was written by Ryuto Hattori.
15.458333
53
0.77628
yue_Hant
0.614983
119e8bd0a7d615d9b2ea86b5b6e3973fc431866b
2,501
md
Markdown
Doc/G/Microsoft-DUA-OAI/Sec/Rep/0.md
Ralph-Diab/Cmacc-Org
8842eace9159ded10791d2385b871f6aafc46e4e
[ "MIT" ]
42
2015-12-30T20:53:29.000Z
2022-01-04T03:51:50.000Z
Doc/G/Microsoft-DUA-OAI/Sec/Rep/0.md
Ralph-Diab/Cmacc-Org
8842eace9159ded10791d2385b871f6aafc46e4e
[ "MIT" ]
16
2015-10-01T12:01:05.000Z
2022-03-27T23:32:53.000Z
Doc/G/Microsoft-DUA-OAI/Sec/Rep/0.md
Ralph-Diab/Cmacc-Org
8842eace9159ded10791d2385b871f6aafc46e4e
[ "MIT" ]
26
2015-10-07T18:40:44.000Z
2021-02-10T10:53:06.000Z
Ti=Representations and Warranties; Disclaimer

1.sec={_Data_Provider} and {_Data_User} each represent and warrant that it will perform its activities in this {_Agreement} in compliance with applicable laws, including data protection and privacy laws.

Comment: This clause gives each party assurance that the other party will be responsible for its compliance with the laws relevant to its obligations under the Agreement. For agreements subject to GDPR, the Agreement may include a provision stating that each party is an independent data controller to further clarify that each party is independently responsible for compliance.

2.sec={_Data_Provider} represents and warrants that it is not aware of any contractual or other restrictions on the {_Training_Data} that would limit {_Data_User}’s {_Training} of the {_AI_Model} or use and distribution of the {_Trained_Model} as contemplated in this {_Agreement}. {_Data_Provider} makes no representations or warranties in this {_Agreement} with respect to {_Data_User}’s rights to use and distribute the underlying {_AI_Model}.

Comment: This clause is designed to give assurance to the Data User that the Data Provider is not aware of any restrictions outside of this Agreement on the intended use of the Training Data. However, because the Data Provider is not necessarily the source of the AI Model, this clause also clarifies that the Data Provider’s representations and warranties do not extend to the AI Model itself. Instead, the Data User is responsible for ensuring it has the rights it needs with respect to the AI Model, which is the subject of the next representation and warranty.

3.sec={_Data_User} represents and warrants that it has sufficient rights with respect to the {_AI_Model} to {_Train} the {_AI_Model} and distribute the {_Trained_Model} as required by this {_Agreement}.

4.sec={_Data_Provider} makes no representations or warranties as to the accuracy or completeness of the {_Training_Data} and specifically disclaims any warranties of merchantability or fitness for a particular purpose with respect to the {_Training_Data}. Except as set forth in this Section {Rep.Xnum} or in Attachment {Annex.ProjectDetails.Xnum}, the {_Training_Data} is provided to {_Data_User} “as-is” and with all faults and defects.

Comment: Any representations pertaining to the quality or other attributes of the data should be set out in Attachment A. The default is that the Data User uses the Training Data at its own risk.

=[G/Z/ol-a/s4]
138.944444
564
0.809276
eng_Latn
0.999703
119eadf6ef999a2d23e1f081676b6214cd805117
1,325
md
Markdown
docs/readbook/life/1.md
BestDingSheng/my-notes
d5c954277b7d43f7e98ea28749e16baa69bebe20
[ "MIT" ]
null
null
null
docs/readbook/life/1.md
BestDingSheng/my-notes
d5c954277b7d43f7e98ea28749e16baa69bebe20
[ "MIT" ]
null
null
null
docs/readbook/life/1.md
BestDingSheng/my-notes
d5c954277b7d43f7e98ea28749e16baa69bebe20
[ "MIT" ]
null
null
null
# Record of important events

> January 1, 2021. Starting today, important events in my life will be recorded here.

## January

**2021-01-16**

- At 6 a.m. my wife's water broke; we went to the emergency room at the First Maternity and Infant Hospital

**2021-01-17**

- At 11:03, our baby was born

**2021-01-21**

- Brought the baby and my wife home from the hospital

## February

**2021-02-11**

- Chinese New Year

**2021-02-16**

- Held the baby's one-month celebration

**2021-02-25**

- Paid the deposit on a house
- My wife's maternity allowance arrived

## March

**2021-03-01**

- Told my manager I was taking marriage leave, planning to start Wednesday and go home to register the baby's household residence (hukou)

**2021-03-03**

- Went to the school to have my personnel file resealed; took the slow overnight train back to Jiujiang

**2021-03-04**

- Registered the baby's hukou
- Deposited my personnel file at the talent center

**2021-03-10**

- Took the baby to the hospital for a checkup
- Had an ultrasound done for the baby

**2021-03-23**

- Met the landlord at the agency to pay the down payment
- Applied for the mortgage the same day

**2021-03-29**

- Mom was admitted to the hospital for surgery

**2021-03-30**

- Discharged. Hospital beds are in such demand that after just one day they sent her home. It cost a little over 10,800 yuan; since we had filed for out-of-area coverage back home, part of the cost was reimbursed directly at checkout, so there was no need to go back home to claim it

## April

**2021-04-01**

- The mortgage was approved

**2021-04-08**

- The company insurance claim was settled; Mom received a little over 5,000 yuan
- Went to the Minhang trading center for the property transfer
- The landlord handed over the keys

**2021-04-11**

- Cleaned the new house and assembled the bedroom bed

**2021-04-14**

- Paid the taxes at the trading center; the whole home-buying process is complete

**2021-04-23**

- A new colleague joined; my leader asked me to be his mentor and walk him through the business. Haha, it's my first time as a mentor, so I'm a little excited

**2021-04-29**

- Picked up the property deed at the bank
- Met up with an old classmate I hadn't seen in ages, then had dinner at a restaurant another classmate owns

## May

**2021-05-01**

- Bought a toilet and a water heater
- The water heater was installed in the new home

**2021-05-05**

- The toilet installer came, but the rough-in distance was wrong, so we returned it and bought another
- The security door and window installers also came; very happy with how it turned out

**2021-05-13**

- My wife's maternity leave ended; she went back to work

**2021-05-17**

- Took the baby to the supermarket for the first time

**2021-05-22**

- First outing as a family of three, to the Expo Park

**2021-05-24**

- Liu Zejing and Cai Anfa left the company; a little sad to part with colleagues of three years

## June

**2021-06-18** JD 618 sale

- Bought a TV
- And a fridge

## July

**2021-07-08**

- Jiawen left; I'm the only one of the old colleagues remaining. Life is so hard

**2021-07-17**

The baby turned 6 months old today and started eating rice cereal

**2021-07-19**

The company announced a team-building trip; I chose Macau
8.892617
74
0.634717
yue_Hant
0.682241
119eb51fdd1d8b4d7fc4fc679b5b4b040f77fd19
702
md
Markdown
ovine-archive/README.md
byL1N3/byl1n3.github.io
efc2c0eeb6e357acc75da48a083dd1c80da28a7b
[ "MIT" ]
53
2017-07-25T16:57:43.000Z
2022-03-27T16:41:08.000Z
ovine-archive/README.md
byL1N3/byl1n3.github.io
efc2c0eeb6e357acc75da48a083dd1c80da28a7b
[ "MIT" ]
27
2016-10-31T16:46:52.000Z
2022-03-20T18:03:07.000Z
ovine-archive/README.md
byL1N3/byl1n3.github.io
efc2c0eeb6e357acc75da48a083dd1c80da28a7b
[ "MIT" ]
56
2016-12-08T17:44:57.000Z
2022-03-19T12:04:53.000Z
## Notes

Maybe the idea behind this is that some sheep found these stories strewn somewhere and decided to collect them and label them some weird name, as seen at the bottom of the document because it ruins my syntax highlighting.

Getting the Unicode character for an italicized *p* can be done from https://yaytext.com/bold-italic/.

One could say that the documents were found deep in the archives in Ovine Loc 87. The name seemed to have been caused by the sheep's deficiency in computer literacy, but due to the strict guidelines on titles of ovine works, regardless of the original author, the exact style&mdash;except for the font family itself&mdash;must be preserved.

> *p*Apers found"""e',
70.2
340
0.787749
eng_Latn
0.9999
119fc314e26ef556bc21b4cd8c47f733a20524c4
574
md
Markdown
content/navigation/dacseedacs/index.zh-cn.md
itey/metabd
263376ea70afea08cbc752c7117928826d2ec61a
[ "MIT" ]
null
null
null
content/navigation/dacseedacs/index.zh-cn.md
itey/metabd
263376ea70afea08cbc752c7117928826d2ec61a
[ "MIT" ]
6
2022-03-14T18:35:35.000Z
2022-03-28T18:43:54.000Z
content/navigation/dacseedacs/index.zh-cn.md
itey/metabd
263376ea70afea08cbc752c7117928826d2ec61a
[ "MIT" ]
null
null
null
---
weight:
title: "Dacsee(DACS)"
description: "Dacsee is a decentralized ride-sharing platform"
date: 2022-03-25T21:57:40+08:00
lastmod: 2022-03-25T16:45:40+08:00
draft: false
authors: ["Metabd"]
featuredImage: "dacseedacs.webp"
link: ""
tags: ["Digital Tokens","Dacsee(DACS)"]
categories: ["navigation"]
navigation: ["Digital Tokens"]
lightgallery: true
toc: true
pinned: false
recommend: false
recommend1: false
---

Dacsee is a decentralized ride-sharing platform. Drivers can create ride-hailing services on the Dacsee platform, manage their data, and grow their business. Passengers can pay the base fare with crypto tokens on the Ethereum blockchain, or pay with other tokens or traditional cash. Dacsee's unique social design allows it to grow rapidly without the assistance or control of any corporate entity. Three features (P2P payments, a unique growth system, and drivers building their own online businesses) let Dacsee spread and develop quickly in new markets worldwide, breaking the current monopoly.
27.333333
199
0.801394
yue_Hant
0.698875
dacd30307ae717ce4b508d2cd64dfc929cff5442
5,556
md
Markdown
org/blog/2019-yearly-report/de.md
starfetch/markdown
e72946e83aa869e3a6ac064530dfd9298ed1ff44
[ "MIT" ]
7
2019-06-26T14:03:06.000Z
2021-07-11T03:48:31.000Z
org/blog/2019-yearly-report/de.md
starfetch/markdown
e72946e83aa869e3a6ac064530dfd9298ed1ff44
[ "MIT" ]
76
2019-06-12T07:03:47.000Z
2021-08-15T22:55:57.000Z
org/blog/2019-yearly-report/de.md
starfetch/markdown
e72946e83aa869e3a6ac064530dfd9298ed1ff44
[ "MIT" ]
51
2019-07-02T07:39:40.000Z
2021-11-18T17:11:20.000Z
---
date: 2019-12-10
title: 4109 reasons to be happy about 2019
linktitle: "2019 annual report"
img: cover.jpg
caption: "Close-up of a dandelion against a pink background"
author: joost
---

Wow! What a year for FreeSewing. In August we released version 2.0, which was nothing less than a complete rewrite of our entire technology stack.

The most obvious change for our users is that you can now see your pattern adapt live in the browser as you adjust the settings and options. It's one of those things I knew should be possible with the current state of web technology, but actually seeing it still somehow exceeds my imagination.

## Scaling is hard, but we've made remarkable progress

As cool as our new technology stack is, it isn't the most important work we did last year. Yes, it's pretty neat, and yes, it's only possible because of the work on 2.0. But it's the work itself that matters most.

The main reason for the 2.0 rewrite was to enable vertical scaling of the project. Or, to put it bluntly: to allow FreeSewing to go where I couldn't carry it on my own.

In a way, the FreeSewing (software) project has grown up. We have several people who contribute regularly, [an active chat room](https://discord.freesewing.org/), [a dedicated website and documentation for developers and translators](https://freesewing.dev), and a variety of [packages that we publish on NPM](https://www.npmjs.com/search?q=keywords:freesewing); we've contributed bug fixes and improvements to the upstream software we depend on, and we now also have other people and teams depending on our packages.

We (currently) have [23 patterns available](/patterns/), and we publish 62 packages on NPM (the Node.js package registry). Since we released version 2.0, an average of 450 people have signed up per month, and our total currently stands at over 15,000.

## Translating has never been easier

Since v2, we have moved [our translations](https://freesewing.dev/guides/translator/) to [Crowdin](https://crowdin.com). Translation is arguably the best way to democratize access to our platform, and I want to sincerely thank all the people who have helped, and continue to help, with this effort.

Maybe this is a good moment to point out that you can help too. Our [documentation for translators](https://freesewing.dev/guides/translator/) is a good starting point, or drop by [our chat room](https://discord.freesewing.org/).

## What we have planned for next year

Our plans for next year fall roughly into two categories: improving our platform and adding more patterns.

As the project grows, so does the amount of work required to keep everything running smoothly. We still need to make further performance improvements and take care of auxiliary tasks, such as writing more tests, so that we can change things with confidence instead of risking that introducing a new feature causes breakage further down the line.

We also know there is plenty of room to improve the user experience (UX) as well as the design and user interface (UI). We've done our best, but it isn't really our area of expertise, and we hope to find more contributors who can help us in this regard.

## v2.2 will include a women's body block

But hey, you wanted more patterns, right? Rest assured that this is at the top of our to-do list. We will dedicate extra attention and resources to women's patterns, an area that has been underrepresented so far. Not only by making existing patterns available to women (as we recently did with Simone, a women's version of our Simon pattern), but also by developing a dedicated women's body block on which further patterns can be based. We've marked this as a must-have for FreeSewing v2.2, which we hope to release sometime in January.

We're also looking for ways to bring more designers on board with FreeSewing. One plan on the drawing board is to offer designers pair-programming sessions where they walk us through their vision and we then implement their design in FreeSewing. We're even thinking about live-streaming these sessions so that anyone interested can drop by and follow along.

## 'Tis the season of giving

Thanks to our great patrons, revenue grew again this year. As you may know, FreeSewing donates 100% of its revenue to Doctors Without Borders/Médecins Sans Frontières. So this morning I had the great honor of writing a check for €4109.38 to [MSF](https://www.msf.org/).

That felt **really good**, so a big thank you to [all our patrons](/patrons) for their continued support. If you'd like to become part of this great group of people, [you can do so here](/patrons/join).
113.387755
586
0.810475
deu_Latn
0.999298
dacd596bbf94b3fcca2bc396b0eafbe06ff00dde
774
md
Markdown
README.md
helb/imagemin-crunch
1aa5b8dfc9203e28a9001a06b1fe9136757de667
[ "MIT" ]
1
2018-06-15T13:21:42.000Z
2018-06-15T13:21:42.000Z
README.md
helb/imagemin-crunch
1aa5b8dfc9203e28a9001a06b1fe9136757de667
[ "MIT" ]
null
null
null
README.md
helb/imagemin-crunch
1aa5b8dfc9203e28a9001a06b1fe9136757de667
[ "MIT" ]
null
null
null
# `imagemin-crunch`

> An [imagemin](https://github.com/imagemin/imagemin) plugin to optimize PNGs using chained pngquant and zopfli. Inspired by [Chris Simpkins’](https://github.com/chrissimpkins) [Crunch](https://github.com/chrissimpkins/Crunch/)

[![Build Status](https://travis-ci.org/helb/imagemin-crunch.svg?branch=master)](https://travis-ci.org/helb/imagemin-crunch) [![npm version](https://badge.fury.io/js/imagemin-crunch.svg)](https://badge.fury.io/js/imagemin-crunch)

## Installation

```
$ npm i -S imagemin-crunch
```

## Usage

```js
const imagemin = require('imagemin');
const imageminCrunch = require('imagemin-crunch');

imagemin(['images/*.png'], 'build/images', { use: [ imageminCrunch() ] }).then(() => {
    console.log('Images optimized');
});
```
25.8
227
0.70155
kor_Hang
0.211687
dacd5a7d04b9e25a874c61e258f22f6943fe5558
1,007
md
Markdown
docs/bootstrap/secrets.md
jmgilman/lab
55fd7bceb549cd7b213820fe130792b836f85911
[ "MIT" ]
null
null
null
docs/bootstrap/secrets.md
jmgilman/lab
55fd7bceb549cd7b213820fe130792b836f85911
[ "MIT" ]
null
null
null
docs/bootstrap/secrets.md
jmgilman/lab
55fd7bceb549cd7b213820fe130792b836f85911
[ "MIT" ]
null
null
null
# Bootstrap Secrets

The primary secret provider for the GLab stack is HashiCorp Vault. However, relying on it during the bootstrap process creates a chicken-and-egg scenario, because initially the service is not yet available to be used. To work around this, the AWS Parameter Store is used to provide an early method for obtaining the sensitive data required by the bootstrap process.

Creating, setting, and deleting bootstrap secrets is done via the CLI tool. The purpose behind this is to prevent vendor lock-in to one specific secret provider for the bootstrap process. The CLI tool uses the AWS Parameter Store by default, but it can be extended to use other backends as needed.

Most bootstrap secrets persist even after the bootstrap process and can vary from third-party API credentials to SSH provisioning certificates. To increase security, credentials that are not used outside the stack are randomly generated during the bootstrap process and persisted when Vault is configured.
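To make the flow concrete, here is a minimal sketch of writing and reading such a bootstrap secret in the AWS Parameter Store using the AWS SDK for JavaScript. This is an illustration only, not the repository's CLI tool: the parameter name is a hypothetical example, and the actual interface for managing bootstrap secrets remains the CLI tool described above.

```js
// Minimal sketch, not the repo's CLI tool: store/fetch a bootstrap secret
// in the AWS Parameter Store. Parameter names here are hypothetical examples.
const {
  SSMClient,
  PutParameterCommand,
  GetParameterCommand,
} = require("@aws-sdk/client-ssm");

const client = new SSMClient({});

// Write a secret as an encrypted SecureString parameter.
async function putBootstrapSecret(name, value) {
  await client.send(new PutParameterCommand({
    Name: name,              // e.g. "/bootstrap/ssh-ca-key" (hypothetical)
    Value: value,
    Type: "SecureString",    // encrypted at rest with the default KMS key
    Overwrite: true,
  }));
}

// Read the secret back, asking SSM to decrypt the value.
async function getBootstrapSecret(name) {
  const result = await client.send(new GetParameterCommand({
    Name: name,
    WithDecryption: true,
  }));
  return result.Parameter.Value;
}
```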
55.944444
80
0.821251
eng_Latn
0.999608
dacd6ea56652dbf002b122a599a96f760ab02469
1,094
md
Markdown
source/_drafts/effective-ruby.md
hzlu/luhaozhao.com
f5443fce78fc495d0210e3cdb6ee23bcb371f162
[ "MIT" ]
null
null
null
source/_drafts/effective-ruby.md
hzlu/luhaozhao.com
f5443fce78fc495d0210e3cdb6ee23bcb371f162
[ "MIT" ]
null
null
null
source/_drafts/effective-ruby.md
hzlu/luhaozhao.com
f5443fce78fc495d0210e3cdb6ee23bcb371f162
[ "MIT" ]
null
null
null
---
title: Suggestions for improving Ruby programs
category: ruby
---

+ All values other than false and nil count as truthy
+ Ruby's 0 is also truthy
+ If you need to distinguish false from nil, use the nil? method, or the "==" operator with false as the left operand
+ When a module is mixed into a class with include, a singleton class is created and inserted into the class hierarchy; the module included last is found first during method lookup.
+ A module never overrides methods defined in the class itself
+ The ancestors method returns an array of all the classes and modules making up the inheritance hierarchy; it can only be called on classes and modules, and it skips singleton classes.
+ The included_modules method returns the same array as ancestors, except that all classes are filtered out.
+ Use `class << obj` to open an object's singleton class
+ super is actually a Ruby keyword; if you add parentheses or arguments it behaves like a method call, while used bare, super passes along all of the host method's arguments when invoking the target method.
+ When super is called, it searches for a same-named method one level up the inheritance hierarchy, rather than simply starting from the superclass.
+ If you don't define an initialize method in a class, the class inherits the default implementation from BasicObject.
+ When copying an object with dup or clone, you can define an initialize_copy method in the class to run custom logic on the newly created copy. Ruby makes this method private automatically, and it receives the original object as its argument.
+ Ruby requires an explicit receiver when calling a setter method; otherwise the statement is just a simple variable assignment
+ When handling structured data, if creating a new class feels inappropriate, prefer Struct over Hash; Struct::new also accepts an optional block in which you can define instance and class methods.
+ Assign the return value of Struct::new to a constant and use that constant like a class.
+ If the module used as a namespace has already been defined earlier, you can use the module name with the class-path separator directly in the class definition
+ Constant lookup: the current lexical scope is checked first; if the constant is not found there, the search continues up the inheritance hierarchy
+ dup copies only the object's tainted state; clone copies both the frozen and tainted states, and clone also copies the object's singleton class
+ Share private state through protected: an object's protected method can be called with an explicit receiver only if that object and the receiver are instances of the same class, or share the same superclass that defines the protected method.
+ A common use of class variables like `@@a` is implementing the singleton pattern (not to be confused with singleton classes; the singleton pattern means a class has only one instance). Because class variables are shared by all subclasses, and any instance of those classes can access and modify them, you should use class-level instance variables instead.
42.076923
98
0.869287
yue_Hant
0.61992
dacdf766c680b01b9172d41da94e325e23b73b3f
1,623
md
Markdown
_the-good-doctor/temporada-3/capitulo-1.md
breaktimetv2/breaktimetv2.github.io
486dafc58b6e6c7e3fd3aaf38f334466bb9651a7
[ "MIT" ]
null
null
null
_the-good-doctor/temporada-3/capitulo-1.md
breaktimetv2/breaktimetv2.github.io
486dafc58b6e6c7e3fd3aaf38f334466bb9651a7
[ "MIT" ]
null
null
null
_the-good-doctor/temporada-3/capitulo-1.md
breaktimetv2/breaktimetv2.github.io
486dafc58b6e6c7e3fd3aaf38f334466bb9651a7
[ "MIT" ]
null
null
null
--- layout: episodios title: "The Good Doctor 3x01" url_serie_padre: 'the-good-doctor/temporada-3' category: 'series' capitulo: 'yes' anio: '2019' prev: '' proximo: 'capitulo-2' sandbox: allow-same-origin allow-forms idioma: 'Subtitulado' calidad: 'Full HD' reproductor: fembed image_banner: 'https://res.cloudinary.com/dmsdzouoo/image/upload/v1569378782/edLV34FXx1iFJA3hbZE7SYRSS4m-min_a6jdfg.jpg' reproductores: ["https://hls4.openloadpremium.com/player.php?id=dFVTd3dyMXN5dVJENEh0cUNJN0JuRWphVWhBcUs2UHBta3o1QldmUEFHQ24wNzdURVdvbnpUT3BzSGtaTkUydUhvVlpWOXNsaFE5T3d6MVh5Yi92SUE9PQ&sub=https://sub.cuevana2.io/vtt-sub/sub7/The.Good.Doctor.3x01.vtt","https://tutumeme.net/embed/player.php?u=bXQ3ajJOaW1wcFRGcEs2VW5XRGExTlRPMytmUnc3bHVwcWhoenVIUjI5SHF5TlNwc0taaG1jN2gwZHZSNTlIRHVhV2tZWitkNUtDVDNOL1ZvYW1rYjJOaW9LV1o","https://api.cuevana3.io/olpremium/gd.php?file=ek5lbm9xYWNrS0xNejZabVlkSFIyTkxQb3BPWDB0UFkwY3lvbjJIRjBPQ1QwNStUck1mVG9kVExvM0djeHA3VnFybXRscUdvMWRXNHRZbU1lYXVUeDg2cGpKVmp4cXpBejYxcGxJcW9zdFdVcmFXSWk2eTB3cXk5b29pS3FMYkFsSzFqbG1TOHk4V3B4NDJYbnJ5eHdyakhxb1NLbHRmTnFyMkxnNS9OMk1XVzA0cUllYnlsMXFxbG5JaWh2TW5YcXFXaWY2Q3dwY0dWdmFhQVk5S2x5ZExHYklLRWlNbmYxOG1ZYjZ6SDFBPT0","https://api.cuevana3.io/stream/index.php?file=ek5lbm9xYWNrS0xYMTZLa2xNbkdvY3ZTb3BtZng4TGp6ZFpobGFMUGtOVEx6SitYWU5YTTdORE1vWmRnbEpham5KTmtZSlRTMGViVTBxZGdsdEhPb3RqWGFXWnFrcFdxbk1LR2gzV3l3THVvd29aaVpzR21vNVdSb0tKbm9kSGkxOWVTcHF6U3hyRFh5S1dibUE9PQ","https://api.cuevana3.io/rr/gd.php?h=ek5lbm9xYWNrS0xJMVp5b21KREk0dFBLbjVkaHhkRGdrOG1jbnBpUnhhS1ZzMnhxaWF1WjFkclZtWng2dHE2a3hyaVlhNml6MGVldXJZcGtyYVhVdUsyU3FadVkyUT09"] tags: - Drama ---
54.1
1,194
0.882933
yue_Hant
0.368386
daceb2f32a8ef980d5409cd1b571c413db1bc59e
20,259
md
Markdown
Workshop.md
mindgraffiti/ToDoBackend
59fca98c11fe2e9045bdf38542a419f83e0f2c28
[ "Apache-2.0" ]
null
null
null
Workshop.md
mindgraffiti/ToDoBackend
59fca98c11fe2e9045bdf38542a419f83e0f2c28
[ "Apache-2.0" ]
null
null
null
Workshop.md
mindgraffiti/ToDoBackend
59fca98c11fe2e9045bdf38542a419f83e0f2c28
[ "Apache-2.0" ]
null
null
null
# Building a "ToDo" Backend with Kitura <p align="center"> <img src="https://www.ibm.com/cloud-computing/bluemix/sites/default/files/assets/page/catalog-swift.svg" width="120" alt="Kitura Bird"> </p> <p align="center"> <a href= "http://swift-at-ibm-slack.mybluemix.net/"> <img src="http://swift-at-ibm-slack.mybluemix.net/badge.svg" alt="Slack"> </a> </p> ## Run the tests In order to implement a ToDo Backend, a server is required that provides support for storing, retrieving, deleting and updating "to do" items. The ToDoBackend project doesn't provide a specification for how the server must respond, rather it provides a set of tests which the server must pass. The "todo-backend-js-spec" project provides those tests. ### 1. Run the ToDo-Backend Tests: 1. Open the tests in a web browser: `open ~/todo-backend-js-spec/index.html` 2. Set a "test target root" of `http://localhost:8080` 3. Click "run tests". All the tests should fail. The first error reported should be as follows: :x: `the api root responds to a GET (i.e. the server is up and accessible, CORS headers are set up)` ``` AssertionError: expected promise to be fulfilled but it was rejected with [Error: GET http://localhost:8080/ FAILED The browser failed entirely when make an AJAX request. ``` This shows that the tests made a `GET` request to `http://localhost.com:8080`, but it failed with no response. This is expected as there is no server running yet - we're going to fix that in a moment! In the instructions below, reloading the page will allow you to re-run the ToDo-Backend tests. ## Building a Kitura server Implementing a compliant ToDo Backend is an incremental task, with the aim being to pass more of the testsuite at each step. The first step is to build and run your Kitura server so it can respond to requests. ### 1. Run your Kitura server 1. Open the ToDoServer project in Xcode. ``` cd ~/ToDoBackend/ToDoServer open ToDoServer.xcodeproj ``` 2. Run your Kitura server in Xcode: 1) Change the selected target from "ToDoServer-Package" to the "ToDoServer" executable. 2) Press the `Run` button or use the `⌘+R` key shortcut. 3) Select "Allow incoming network connections" if you are prompted. 3. Check that some of the standard Kitura URLs are running: * Kitura splash screen: [http://localhost:8080/](http://localhost:8080/) * Kitura monitoring dashboard: [http://localhost:8080/swiftmetrics-dash/](http://localhost:8080/swiftmetrics-dash/) * Kitura health API: [http://localhost:8080/health](http://localhost:8080/health) ### 2. Add support for OpenAPI **Important**: If you started your project by choosing the "OpenAPI" tile in the Kitura desktop app, you can skip to Section 3. [OpenAPI](https://www.openapis.org/) is the most popular way to document RESTful web services. The OpenAPI ecosystem provides a broad range of tools and services for developers across the API lifecycle. Kitura provides a package which makes it easy to add OpenAPI support to your application. Let's add OpenAPI. 1. Open the `ToDoServer` > `Package.swift` file 2. Add the following to the end of the dependencies section of the `Package.swift` file: ```swift .package(url: "https://github.com/IBM-Swift/Kitura-OpenAPI.git", from: "1.0.0"), ``` 3. 
Update the target dependencies for the "Application" target to the following (note the lack of hyphen in KituraOpenAPI): ```swift .target(name: "Application", dependencies: ["KituraOpenAPI", "Kitura", "CloudEnvironment", "Health", "SwiftMetrics" ]), ``` In order for Xcode to pick up the new dependency, the Xcode project now needs to be regenerated. 1. Close Xcode. 2. Regenerate the Xcode project and reopen: ``` cd ~/ToDoBackend/ToDoServer swift package generate-xcodeproj open ToDoServer.xcodeproj ``` Now we need to enable OpenAPI in our Kitura server. 1. Open the `Sources` > `Application` > `Application.swift` file 2. Add an import for the KituraOpenAPI library to the start of the file: ```swift import KituraOpenAPI ``` 3. Add the following code into the end of the `postInit()` function after the call to `initializeHealthRoutes()`: ```swift KituraOpenAPI.addEndpoints(to: router) ``` 4. Re-run the server project in Xcode: 1) Edit the scheme again and select a Run Executable of "ToDoServer". 2) Run the project, then "Allow incoming network connections" if you are prompted. ### 3. Try out OpenAPI in Kitura Now, you can open [http://localhost:8080/openapi](http://localhost:8080/openapi) and view the live OpenAPI specification of your Kitura application in JSON format. You can also open [http://localhost:8080/openapi/ui](http://localhost:8080/openapi/ui) and view SwaggerUI, a popular API development tool. You will see one route defined: the GET `/health` route you visited earlier. Click on the route to expand it, then click "Try it out!" to query the API from inside SwaggerUI. You should see a Response Body in JSON format, like: ``` { "status": "UP", "details": [], "timestamp": "2018-06-04T16:03:17+0000" } ``` and a Response Code of 200. Congratulations, you have added OpenAPI support to your Kitura application and used SwaggerUI to query a REST API! ### 4. Add Cross Origin Resource Sharing (CORS) Support Re-run the ToDo-Backend tests by reloading the test page in your browser. The first test should still fail with the following: :x: `the api root responds to a GET (i.e. the server is up and accessible, CORS headers are set up)` ``` AssertionError: expected promise to be fulfilled but it was rejected with [Error: GET http://localhost:8080 FAILED The browser failed entirely when make an AJAX request. Either there is a network issue in reaching the url, or the server isn't doing the CORS things it needs to do. ``` This test is still failing, even though the server is responding on `localhost:8080`. This is because Cross Origin Resource Sharing (CORS) is not enabled. By default, web servers only serve content to web pages that were served by that web server. In order to allow other web pages, such as the ToDo-Backend test page, to connect to the server, [Cross Origin Resource Sharing (CORS)](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing) must be enabled. Kitura provides a package which makes it easy to enable CORS in your application. Let's add CORS to your project. 1. Open the `ToDoServer` > `Package.swift` file 2. Add the following to the end of the dependencies section of the `Package.swift` file: ```swift .package(url: "https://github.com/IBM-Swift/Kitura-CORS.git", from: "2.1.0"), ``` 3. 
Update the target dependencies for the "Application" target to the following (note the lack of hyphen in KituraCORS): ```swift .target(name: "Application", dependencies: ["KituraCORS", "KituraOpenAPI", "Kitura", "CloudEnvironment", "Health", "SwiftMetrics" ]), ``` In order for Xcode to pick up the new dependency, the Xcode project now needs to be regenerated. 1. Close Xcode. 2. Regenerate the Xcode project and reopen: ``` cd ~/ToDoBackend/ToDoServer swift package generate-xcodeproj open ToDoServer.xcodeproj ``` Now we need to enable CORS in our Kitura server. 1. Open the `Sources` > `Application` > `Application.swift` file 2. Add an import for the CORS library to the start of the file: ```swift import KituraCORS ``` 3. Add the following code at the start of the `postInit()` function: ```swift let options = Options(allowedOrigin: .all) let cors = CORS(options: options) router.all("/*", middleware: cors) ``` 4. Re-run the server project in Xcode 1) Edit the scheme again and select a Run Executable of "ToDoServer". 2) Run the project, then "Allow incoming network connections" if you are prompted. 5. Re-run the tests by reloading the test page in your web browser. The first test should now be passing! But the second test is failing: :x: `the api root responds to a POST with the todo which was posted to it` In order to fix this, we need to implement a `POST` request that saves a ToDo item. ### 5. Add Support for handling a POST request on `/` REST APIs typically consist of an HTTP request using a verb such as `POST`, `PUT`, `GET` or `DELETE` along with a URL and an optional data payload. The server then handles the request and responds with an optional data payload. A request to store data typically consists of a POST request with the data to be stored, which the server then handles and responds with a copy of the data that has just been stored. This means we need to define a `ToDo` type, register a handler for POST requests on `/`, and implement the handler to store the data. 1. Define a data type for the ToDo items: 1. Select the Application folder in the left hand explorer in Xcode   2. Select `File` > `New` > `File...` from the pull down menu 3. Select `Swift File` and click `Next`   4. Name the file `Models.swift`, change the `Targets` from `ToDoServerPackageDescription` to `Application`, then click `Create`   5. Add the following to the created file: ```swift public struct ToDo : Codable, Equatable { public var id: Int? public var user: String? public var title: String? public var order: Int? public var completed: Bool? public var url: String? public static func ==(lhs: ToDo, rhs: ToDo) -> Bool { return (lhs.title == rhs.title) && (lhs.user == rhs.user) && (lhs.order == rhs.order) && (lhs.completed == rhs.completed) && (lhs.url == rhs.url) && (lhs.id == rhs.id) } } ``` This creates a struct for the ToDo items that uses Swift 4's `Codable` capabilities. 2. Create an in-memory data store for the ToDo items   1. Open the `Sources` > `Application` > `Application.swift` file   2. Add `todoStore`, `nextId` and `workerQueue` properties into the App class. On the line below `let cloudEnv = CloudEnv()` add: ```swift private var todoStore: [ToDo] = [] private var nextId: Int = 0 private let workerQueue = DispatchQueue(label: "worker") ``` 3. To be able to use `DispatchQueue` on Linux, add the following `import` statement to the start of the file: ```swift import Dispatch ``` 4. 
Add a helper method at the end of the class, before the last closing brace ```swift func execute(_ block: (() -> Void)) { workerQueue.sync { block() } } ``` This will be used to make sure that access to the todoStore is serialized, so the app does not crash on concurrent requests. 3. Register a handler for a `POST` request on `/` that stores the ToDo item data. 1. Add the following into the `postInit()` function: ```swift router.post("/", handler: storeHandler) ``` 2. Implement the `storeHandler()` that receives a ToDo, and returns the stored ToDo. Add the following as a function in the App class: ```swift func storeHandler(todo: ToDo, completion: (ToDo?, RequestError?) -> Void ) { var todo = todo if todo.completed == nil { todo.completed = false } todo.id = nextId todo.url = "http://localhost:8080/\(nextId)" nextId += 1 execute { todoStore.append(todo) } completion(todo, nil) } ```   This expects to receive a ToDo struct from the request, sets `completed` to false if it is `nil` and adds a `url` value that informs the client how to retrieve this ToDo item in the future.   The handler then returns the updated ToDo item to the client. 4. Run the project and rerun the tests by reloading the test page in the browser. The first three tests should now pass. Open SwaggerUI again at [http://localhost:8080/openapi/ui](http://localhost:8080/openapi/ui) and expand the new POST route on `/`. Paste the following JSON into the "input" text box: ``` { "title": "mow the lawn" } ``` Click "Try it out!" and view the response body below. You should see a JSON object representing the new ToDo item you created in the store: ``` { "id": 0, "title": "mow the lawn", "completed": false, "url": "http://localhost:8080/0" } ``` Congratulations, you have successfully added a ToDo item to the store using SwaggerUI! Going back to the testsuite webpage, the next failing test says this: :x: `after a DELETE the api root responds to a GET with a JSON representation of an empty array` In order to fix this, handlers for `DELETE` and `GET` requests are needed. ### 6. Add Support for handling a DELETE request on `/` A request to delete data typically consists of a DELETE request. If the request is to delete a specific item, a URL encoded identifier is normally provided (eg. '/1' for the item with ID 1). If no identifier is provided, it is a request to delete all of the items. In order to pass the next test, the ToDoServer needs to handle a `DELETE` on `/` resulting in removing all stored ToDo items. Register a handler for a `DELETE` request on `/` that empties the ToDo item data. 1. Add the following into the `postInit()` function: ```swift router.delete("/", handler: deleteAllHandler) ``` 2. Implement the `deleteAllHandler()` that empties the todoStore   Add the following as a function in the App class: ```swift func deleteAllHandler(completion: (RequestError?) -> Void ) { execute { todoStore = [] } completion(nil) } ``` Build and run your application again, then reload SwaggerUI to see your new DELETE route. Expand the route and click "Try it out!" to delete the contents of the store. You should see a Response Body of "no content" and a Response Code of 204, indicating that the server successfully fulfilled the request. ### 7. Add Support for handling a GET request on `/` A request to load all of the stored data typically consists of a `GET` request with no data, which the server then handles and responds with an array of all the data in the store. 1. 
Register a handler for a `GET` request on `/` that loads the data.

   Add the following into the `postInit()` function:

   ```swift
   router.get("/", handler: getAllHandler)
   ```

2. Implement the `getAllHandler()` that responds with all of the stored ToDo items as an array.

   Add the following as a function in the App class:

   ```swift
   func getAllHandler(completion: ([ToDo]?, RequestError?) -> Void ) {
       completion(todoStore, nil)
   }
   ```

3. Run the project and re-run the tests by reloading the test page in the browser.

The first seven tests should now pass, with the eighth test failing:

:x: `each new todo has a url, which returns a todo`

```
GET http://localhost:8080/0 FAILED
404: Not Found (Cannot GET /0.)
```

Refresh SwaggerUI again and view your new GET route. Clicking "Try it out!" will return the empty array (because you just restarted the application and the store is empty), but experiment with using the POST route to add ToDo items then viewing them by running the GET route again. REST APIs are easy!

### 8. Add Support for handling a `GET` request on `/:id`

The next failing test is trying to load a specific ToDo item by making a `GET` request with the ID of the ToDo item that it wishes to retrieve, which is based on the ID in the `url` field of the ToDo item set when the item was stored by the earlier `POST` request. In the test above the request was for `GET /0` - a request for id 0.

Kitura's Codable Routing is able to automatically convert identifiers used in the `GET` request to a parameter that is passed to the registered handler. As a result, the handler is registered against the `/` route, with the handler taking an extra parameter.

1. Register a handler for a `GET` request on `/`:

   ```swift
   router.get("/", handler: getOneHandler)
   ```

2. Implement the `getOneHandler()` that receives an `id` and responds with a ToDo item:

   ```swift
   func getOneHandler(id: Int, completion: (ToDo?, RequestError?) -> Void ) {
       guard let todo = todoStore.first(where: { $0.id == id }) else {
           return completion(nil, .notFound)
       }
       completion(todo, nil)
   }
   ```

3. Run the project and re-run the tests by reloading the test page in the browser.

The first nine tests now pass. The tenth fails with the following:

:x: `can change the todo's title by PATCHing to the todo's url`

```
PATCH http://localhost:8080/0 FAILED
404: Not Found (Cannot PATCH /0.)
```

Refresh SwaggerUI and experiment with using the POST route to create ToDo items, then using the GET route on `/{id}` to retrieve the stored items by ID.

### 9. Add Support for handling a `PATCH` request on `/:id`

The failing test is trying to `PATCH` a specific ToDo item. A `PATCH` request updates an existing item by updating any fields sent as part of the `PATCH` request. This means that a field-by-field update needs to be done.

1. Register a handler for a `PATCH` request on `/`:

   ```swift
   router.patch("/", handler: updateHandler)
   ```

2. Implement the `updateHandler()` that receives an `id` and responds with the updated ToDo item:

   ```swift
   func updateHandler(id: Int, new: ToDo, completion: (ToDo?, RequestError?) -> Void ) {
       guard let index = todoStore.index(where: { $0.id == id }) else {
           return completion(nil, .notFound)
       }
       var current = todoStore[index]
       current.user = new.user ?? current.user
       current.order = new.order ?? current.order
       current.title = new.title ?? current.title
       current.completed = new.completed ?? current.completed
       execute {
           todoStore[index] = current
       }
       completion(current, nil)
   }
   ```

3.
Run the project and rerun the tests by reloading the test page in the browser. Twelve tests should now be passing, with the thirteenth failing as follows: :x: `can delete a todo making a DELETE request to the todo's url` ``` DELETE http://localhost:8080/0 FAILED 404: Not Found (Cannot DELETE /0.) ``` Refresh SwaggerUI and experiment with using the POST route to create ToDo items, then using the PATCH route to update an existing item. For example, if you have a ToDo item at `http://localhost:8080/0` with a title of "mow the lawn", you can change its title by issuing a PATCH with id 0 and this JSON input: ``` { "title": "wash the dog" } ``` You should see a response code of 200 with a response body of: ``` { "id": 0, "title": "wash the dog", "completed": false, "url": "http://localhost:8080/0" } ``` ### 10. Add Support for handling a DELETE request on `/:id` The failing test is trying to `DELETE` a specific ToDo item. To fix this you need an additional route handler for `DELETE` that this time accepts an ID as a parameter. 1. Register a handler for a `DELETE` request on `/`: ```swift router.delete("/", handler: deleteOneHandler) ``` 2. Implement the `deleteOneHandler()` that receives an `id` and removes the specified ToDo item: ```swift func deleteOneHandler(id: Int, completion: (RequestError?) -> Void ) { guard let index = todoStore.index(where: { $0.id == id }) else { return completion(.notFound) } execute { todoStore.remove(at: index) } completion(nil) } ``` 3. Run the project and rerun the tests by reloading the test page in the browser. All sixteen tests should now be passing! ### Congratulations, you've built a Kitura backend for the [Todo-Backend](https://www.todobackend.com) project! ## Next Steps ### 1. Try out the ToDo-Backend web client Now try visiting [https://todobackend.com/client/](https://todobackend.com/client/) in your browser to view the ToDo-Backend web client. Enter an API root of `http://localhost:8080/` and use the website to interact with your REST API. You can add, remove and update ToDo items as you wish. ### 2. Attach a database to your project Our [ORM tutorial](https://github.com/IBM/ToDoBackend/blob/master/DatabaseWorkshop.md) builds upon the project created in this Workshop and replaces storing ToDos in an Array with a database running locally on your machine, using an ORM (Object Relational Mapper) and PostgreSQL.
42.650526
350
0.706846
eng_Latn
0.982368
dacf2297f00e12bc0d0289021eec2012d77661b8
967
md
Markdown
docs/agile_deep_dive.md
woowade/DevOpsForDefense
671ab0269a6bf36e74aba6fae253dc2ab822e5a9
[ "MIT" ]
8
2018-07-11T06:33:29.000Z
2022-01-27T15:14:27.000Z
docs/agile_deep_dive.md
woowade/DevOpsForDefense
671ab0269a6bf36e74aba6fae253dc2ab822e5a9
[ "MIT" ]
8
2018-02-01T16:02:35.000Z
2021-12-06T19:21:23.000Z
docs/agile_deep_dive.md
woowade/DevOpsForDefense
671ab0269a6bf36e74aba6fae253dc2ab822e5a9
[ "MIT" ]
2
2020-03-16T01:54:36.000Z
2021-02-17T15:26:25.000Z
---
layout: series
title: "Agile Deep Dive"
description: "A Survey of Agile topics Tailored to the Defense Industry."
image: "/images/Agile_Cycle.jpeg"
image-attrib: "Lazaro Ibanez on Medium"
image-attrib-url: "https://medium.com/@LazaroIbanez/a-quick-overview-to-agile-5c87ffc9e0f2"
series-id: agile-deep-dive
sitemap:
  priority: 0.6
  lastmod: 2019-06-14
  changefreq: monthly
---

## Agile Deep Dive

Many of our programs are going through the transition from Waterfall to Agile. This is a major change in how we work and even how we think. Agile challenges the long-standing bureaucracies we've built into our programs. Agile challenges the command-and-control / approval cultures we've been conditioned to accept as normal.

The DevOps for Defense community is doing a series of presentations on Agile program execution to help our community understand the details of Agile, how to do Agile right, and how to tailor Agile to our industry.
43.954545
119
0.755946
eng_Latn
0.978627
dad0cc865af50a6aa898e5bca020de36e7540d04
787
md
Markdown
README.md
twister077/OEECollectorAndWeb
40ebe82bc63b16be0ff1988c4a1e3ee84e1ed195
[ "MIT" ]
2
2017-03-28T20:36:03.000Z
2017-06-23T14:01:50.000Z
README.md
twister077/OEECollectorAndWeb
40ebe82bc63b16be0ff1988c4a1e3ee84e1ed195
[ "MIT" ]
null
null
null
README.md
twister077/OEECollectorAndWeb
40ebe82bc63b16be0ff1988c4a1e3ee84e1ed195
[ "MIT" ]
1
2020-12-27T08:03:23.000Z
2020-12-27T08:03:23.000Z
# OEECollectorAndWeb

A VB.NET OEE collecting application that syncs to a Web2py web server written in Python. On the Web2py web server, a standard dashboard and configuration menu are available, including defining countries, plants, departments, machines, etc.

The OEE collecting can be done from an ET-7052 or a PET-7052 (PoE) Modbus module. A serial Expert Modbus module is also supported. There is even some code for connecting to a custom ESP8266 Modbus server (see my other GitHub projects for the ESP8266 Modbus solution).

This solution is still in beta, and due to other responsibilities I will not be able to actively contribute to this project. It is, however, only a small step away from being usable in a production environment. Please feel free to contact me for further details.
87.444444
272
0.80432
eng_Latn
0.999356
dad1031ff22883547097afa7b8007d7cd6daae67
576
md
Markdown
README.md
fabiocicerchia/cloud-phoenix-kata
a8822316b03ed44501b39d43b7ae404120809967
[ "MIT" ]
null
null
null
README.md
fabiocicerchia/cloud-phoenix-kata
a8822316b03ed44501b39d43b7ae404120809967
[ "MIT" ]
1
2019-11-13T20:14:48.000Z
2019-11-13T20:14:48.000Z
README.md
fabiocicerchia/cloud-phoenix-kata
a8822316b03ed44501b39d43b7ae404120809967
[ "MIT" ]
null
null
null
# Cloud Phoenix Kata

[![Build Status](https://travis-ci.org/fabiocicerchia/cloud-phoenix-kata.svg?branch=master)](https://travis-ci.org/fabiocicerchia/cloud-phoenix-kata)

## Stack

- [Terraform](https://www.terraform.io/)
- [Kubernetes](https://kubernetes.io/) + [Helm](https://helm.sh/)
- [minikube](https://minikube.sigs.k8s.io/) / [AWS EKS](https://aws.amazon.com/eks/)
- [TravisCI](https://travis-ci.org/)

## Documentation

All the documentation is available in the folder [`docs`](docs).

## License

MIT Licensed. See [LICENSE](docs/LICENSE.md) for full details.
30.315789
149
0.704861
yue_Hant
0.338997
dad1412a88cc5ba2ebb995412da97689d8601c73
1,504
md
Markdown
tools/c7n_org/README.md
fadiguezel/cloud-custodian
147fffcf9a109ffd4248a746775b55def5a727c5
[ "Apache-2.0" ]
null
null
null
tools/c7n_org/README.md
fadiguezel/cloud-custodian
147fffcf9a109ffd4248a746775b55def5a727c5
[ "Apache-2.0" ]
1
2021-02-24T04:42:37.000Z
2021-02-24T04:42:37.000Z
tools/c7n_org/README.md
fadiguezel/cloud-custodian
147fffcf9a109ffd4248a746775b55def5a727c5
[ "Apache-2.0" ]
1
2021-01-26T10:03:21.000Z
2021-01-26T10:03:21.000Z
## What is c7n-org?

c7n-org is a tool to run custodian against multiple accounts at once.

## Installation

```shell
pip install c7n-org
```

c7n-org has 3 run modes:

```shell
Usage: c7n-org [OPTIONS] COMMAND [ARGS]...

  custodian organization multi-account runner.

Options:
  --help  Show this message and exit.

Commands:
  report      report on a cross account policy execution.
  run         run a custodian policy across accounts
  run-script  run an aws script across accounts
```

In order to run c7n-org against multiple accounts, a config file must first be created containing pertinent information about the accounts:

```yaml
accounts:
- account_id: '123123123123'
  name: account-1
  regions:
  - us-east-1
  - us-west-2
  role: arn:aws:iam::123123123123:role/CloudCustodian
  tags:
  - type:prod
  - division:some division
  - partition:us
  - scope:pci
...
```

## Running a Policy with c7n-org

To run a policy, the following arguments must be passed in:

```shell
-c | accounts config file
-s | output directory
-u | policy
```

```shell
c7n-org run -c custodian-all-us.yml -s output -u test.yml --dryrun
```

After running the above command, the following folder structure will be created:

```
output
 |_ account-1
    |_ us-east-1
       |_ policy-name
          |_ resources.json
          |_ custodian-run.log
    |_ us-west-2
       |_ policy-name
          |_ resources.json
          |_ custodian-run.log
 |- account-2
...
```
19.789474
139
0.654255
eng_Latn
0.958835
dad2f27324f7371cc84b63f3305cc17dbb67d38f
35
md
Markdown
README.md
Stephen-Callum/test-project
e260861230cb7daf2e2aa719d74c62f39f0ed7ad
[ "MIT" ]
null
null
null
README.md
Stephen-Callum/test-project
e260861230cb7daf2e2aa719d74c62f39f0ed7ad
[ "MIT" ]
1
2020-02-26T10:30:03.000Z
2020-02-26T12:16:45.000Z
README.md
Stephen-Callum/test-project
e260861230cb7daf2e2aa719d74c62f39f0ed7ad
[ "MIT" ]
null
null
null
# test-project Test github project
11.666667
19
0.8
nld_Latn
0.528441
dad352d447b418784b081af6fd87e078023c358a
1,727
md
Markdown
api/Word.SynonymInfo.Word.md
qiezhenxi/VBA-Docs
c49aebcccbd73eadf5d1bddc0a4dfb622e66db5d
[ "CC-BY-4.0", "MIT" ]
1
2018-10-15T16:15:38.000Z
2018-10-15T16:15:38.000Z
api/Word.SynonymInfo.Word.md
qiezhenxi/VBA-Docs
c49aebcccbd73eadf5d1bddc0a4dfb622e66db5d
[ "CC-BY-4.0", "MIT" ]
null
null
null
api/Word.SynonymInfo.Word.md
qiezhenxi/VBA-Docs
c49aebcccbd73eadf5d1bddc0a4dfb622e66db5d
[ "CC-BY-4.0", "MIT" ]
null
null
null
---
title: SynonymInfo.Word Property (Word)
keywords: vbawd10.chm161153025
f1_keywords:
- vbawd10.chm161153025
ms.prod: word
api_name:
- Word.SynonymInfo.Word
ms.assetid: ec019502-6dc7-16f8-b019-957b00a7e3d1
ms.date: 06/08/2017
---

# SynonymInfo.Word Property (Word)

Returns the word or phrase that was looked up by the thesaurus. Read-only **String**.

## Syntax

_expression_. `Word`

_expression_ An expression that returns a '[SynonymInfo](Word.SynonymInfo.md)' object.

## Remarks

The thesaurus will sometimes look up a shortened version of the string or range used to return the **SynonymInfo** object. The **Word** property allows you to see the exact string that was used.

## Example

This example returns a list of synonyms for the first meaning of the third word in the active document.

```vb
Sub Syn()
    Dim mySynObj As Object
    Dim SList As Variant
    Dim i As Variant
    Set mySynObj = ActiveDocument.Words(3).SynonymInfo
    SList = mySynObj.SynonymList(1)
    For i = 1 To UBound(SList)
        MsgBox "A synonym for " & mySynObj.Word _
            & " is " & SList(i)
    Next i
End Sub
```

This example checks to make sure that the word or phrase that was looked up isn't empty. If it is not, the example returns a list of synonyms for the first meaning of the word or phrase.

```vb
Sub SelectWord()
    Dim mySynObj As Object
    Dim SList As Variant
    Dim i As Variant
    Set mySynObj = Selection.Range.SynonymInfo
    If mySynObj.Word = "" Then
        MsgBox "Please select a word or phrase"
    Else
        SList = mySynObj.SynonymList(1)
        For i = 1 To UBound(SList)
            MsgBox "A synonym for " & mySynObj.Word _
                & " is " & SList(i)
        Next i
    End If
End Sub
```

## See also

[SynonymInfo Object](Word.SynonymInfo.md)
21.320988
195
0.719745
eng_Latn
0.952982
dad40eaeb131854dcd7815a7ad79a4c8359c10e2
482
md
Markdown
translations/ja-JP/content/github/writing-on-github/getting-started-with-writing-and-formatting-on-github/index.md
TR1GG3RElF/docs
44a7d2cc3888d8a72acd60c4d661ea9f146e1d59
[ "CC-BY-4.0", "MIT" ]
13
2020-11-03T19:24:46.000Z
2021-12-18T16:28:25.000Z
translations/ja-JP/content/github/writing-on-github/getting-started-with-writing-and-formatting-on-github/index.md
Tofani1/docs
55171e3e14176829128195edb18405e2c04703a5
[ "CC-BY-4.0", "MIT" ]
110
2021-10-11T21:31:16.000Z
2022-03-28T06:40:55.000Z
translations/ja-JP/content/github/writing-on-github/getting-started-with-writing-and-formatting-on-github/index.md
Tofani1/docs
55171e3e14176829128195edb18405e2c04703a5
[ "CC-BY-4.0", "MIT" ]
2
2021-12-10T00:00:30.000Z
2021-12-27T14:37:40.000Z
---
title: Getting started with writing and formatting on GitHub
redirect_from:
  - /articles/markdown-basics/
  - /articles/things-you-can-do-in-a-text-area-on-github/
  - /articles/getting-started-with-writing-and-formatting-on-github
intro: In GitHub issues, pull requests, and wikis, you can use simple features to format your comments and interact with other users.
versions:
  fpt: '*'
  ghes: '*'
  ghae: '*'
  ghec: '*'
children:
  - /about-writing-and-formatting-on-github
  - /basic-writing-and-formatting-syntax
shortTitle: Start writing on GitHub
---
25.368421
83
0.73444
eng_Latn
0.335579
dad43d2f53cba30b1d4c67a26cdb0ef3e8052894
36
md
Markdown
README.md
joeyhuang591/react_project
899f4338aaf797d158a256009c689df86de6555f
[ "MIT" ]
null
null
null
README.md
joeyhuang591/react_project
899f4338aaf797d158a256009c689df86de6555f
[ "MIT" ]
null
null
null
README.md
joeyhuang591/react_project
899f4338aaf797d158a256009c689df86de6555f
[ "MIT" ]
null
null
null
# Video Search App based on React
9
33
0.722222
eng_Latn
0.762489
dad46ffe7d7bcd96a24e8c52528e9d2c354f5d1e
2,853
md
Markdown
README.md
RyanMans/XKKit
1f44e174368aa0c25544cdcd49e67fdc5b7af661
[ "MIT" ]
null
null
null
README.md
RyanMans/XKKit
1f44e174368aa0c25544cdcd49e67fdc5b7af661
[ "MIT" ]
null
null
null
README.md
RyanMans/XKKit
1f44e174368aa0c25544cdcd49e67fdc5b7af661
[ "MIT" ]
null
null
null
# XKKit

[![CI Status](http://img.shields.io/travis/ALLen、LAS/XKKit.svg?style=flat)](https://travis-ci.org/ALLen、LAS/XKKit)
[![Version](https://img.shields.io/cocoapods/v/XKKit.svg?style=flat)](http://cocoapods.org/pods/XKKit)
[![License](https://img.shields.io/cocoapods/l/XKKit.svg?style=flat)](http://cocoapods.org/pods/XKKit)
[![Platform](https://img.shields.io/cocoapods/p/XKKit.svg?style=flat)](http://cocoapods.org/pods/XKKit)

## Example

To run the example project, clone the repo, and run `pod install` from the Example directory first.

## Method

Message handling center: XKMsgCenter

![](images/msgcenter.png)

Wrappers for common GCD methods:

![](images/gcd.png)

## Installation

XKKit is available through [CocoaPods](http://cocoapods.org). To install it, simply add the following line to your Podfile:

```ruby
pod 'XKKit'
```

### Building a framework with Xcode

```
In day-to-day project development, a static library packaged as a framework can contain images, xibs, and other resources.
For a static library packaged as a .a, we need to create a separate Bundle file to hold images, xibs, and other resources.
```

1. First, create a framework project

   ![](images/create.png)

2. Set the Mach-O type of the executable produced at link time to `static` (choose `dynamic` for a dynamic library)

   ![](images/static.png)

3. Configure the framework to support multiple architectures (the Architectures setting)

   ```
   Devices (armv7=3gs-4s, armv7s=5-5c, arm64=5s-6plus)
   Simulators (i386=3gs-5, x86_64=5s-6plus)
   ```

   ![](images/architecture.png)

4. Include xib and image resources in the framework; put the headers you want to expose under `Public`, and the .m files to build under `Compile Sources`

   ![](images/4.png)

5. Add the `headers you want to expose` to the header file generated automatically when the framework was created, otherwise you will get a warning

   ![](images/header.png)

6. Select a device/simulator and build with Command+B to produce a static library that supports only that device/simulator

7. Right-click the .framework and choose Show In Finder to locate the corresponding static library

   ![](images/finder.png)

8. Verify the `.framework` produced for device and simulator (verification in Terminal)

   ![](images/checkfinder.png)
   ![](images/terminal.png)

### Building a universal framework with an Aggregate target

```
Building a universal library mainly relies on an Aggregate target, which has two key properties:
1. it can build multiple targets in one go
2. it can run custom scripts while building (regular targets can too)
```

1. First, create an Aggregate target

   ![](images/addgate.png)
   ![](images/aggregate.png)

2. Write a custom shell script

   ![](images/shell.png)

##### Script:

```
if [ "${ACTION}" = "build" ]
then
INSTALL_DIR=${SRCROOT}/Products/${PROJECT_NAME}.framework
DEVICE_DIR=${BUILD_ROOT}/${CONFIGURATION}-iphoneos/${PROJECT_NAME}.framework
SIMULATOR_DIR=${BUILD_ROOT}/${CONFIGURATION}-iphonesimulator/${PROJECT_NAME}.framework
if [ -d "${INSTALL_DIR}" ]
then
rm -rf "${INSTALL_DIR}"
fi
mkdir -p "${INSTALL_DIR}"
cp -R "${DEVICE_DIR}/" "${INSTALL_DIR}/"
lipo -create "${DEVICE_DIR}/${PROJECT_NAME}" "${SIMULATOR_DIR}/${PROJECT_NAME}" -output "${INSTALL_DIR}/${PROJECT_NAME}"
open "${SRCROOT}/Products"
fi
```

3. Run the Aggregate target and verify the architectures the framework supports

   ![](images/run1.png)
   ![](images/run2.png)

### Using the framework static library

* Create a new project, drag the framework static library into it, and check the "copy items" option
* Configure `Build Phases` as follows. Copy Bundle Resources stores the resources in the main bundle, so `you can retrieve them through the NSBundle API`

  ![](images/2-2.png)

* Add the headers of the libraries you need to your header file

  ![](images/23.png)

## Author

ALLen、LAS, [email protected]

## License

XKKit is available under the MIT license. See the LICENSE file for more info.
19.277027
120
0.712583
yue_Hant
0.60288
dad48d4d0129582393450d12977c8b8db5fd623f
2,718
md
Markdown
_posts/2021-01-24-gaussian-skiers.md
joshmaxsilverman/joshmaxsilverman.github.io
3df820b7ec46a8e05fc8a9bf01223bdcb94fdc28
[ "MIT" ]
null
null
null
_posts/2021-01-24-gaussian-skiers.md
joshmaxsilverman/joshmaxsilverman.github.io
3df820b7ec46a8e05fc8a9bf01223bdcb94fdc28
[ "MIT" ]
null
null
null
_posts/2021-01-24-gaussian-skiers.md
joshmaxsilverman/joshmaxsilverman.github.io
3df820b7ec46a8e05fc8a9bf01223bdcb94fdc28
[ "MIT" ]
null
null
null
---
layout: post
published: true
title: Gaussian Skiers
date: 2021/01/24
---

>**Question**: You're in your town's head-to-head marble racing championship, the traditional way to determine who is the town's next mayor. The race is split into two heats, and your time in either heat is a random, normally distributed variable. If you have the fastest time in the first heat, what is the probability $P_\text{win it all}$ that you end up winning the event, as determined by the sum of your times over both heats? Extra credit: what if there are $29$ other candidates in the race?

<!--more-->

([FiveThirtyEight](https://fivethirtyeight.com/features/can-you-skillfully-ski-the-slopes/))

## Solution

If the first skier wins the first round by $\Delta T_1,$ they will prevail so long as they don't lose the second round by more than $\Delta T_1.$

The gap between the first round times, $\Delta T_1 = t^\text{A}_1 - t^\text{B}_1,$ has some symmetric distribution $P(\Delta T_1).$ Since we know that person $\text{A}$ won the first round, we condition on the left side of $P$, and the median of $\Delta T_1$ is then whatever the $25^\text{th}$ percentile of the unconditioned distribution is.

If person $\text{A}$ is to win overall, $\Delta T_1$ has to be less than or equal to $-\Delta T_2.$ The gap in the second round is a random variable from the same distribution, so the chance that $\Delta T_1 < -\Delta T_2$ (and, therefore, that player $\text{A}$ wins) is $P_2 = 1 - 0.25 = 0.75.$

### Pushing on

There are several ways to reproduce the result above through calculation, but none of them yielded to generalization. An approximate attempt gave good results for low $N$ but broke down as $N$ grew, notably resisting the stable plateauing that persists near $30\%$ for a wide range of $N.$

A simulation suggests a roughly linear decrease on log-log axes, and yields $P_{30} \approx 0.314409.$ Its overall behavior is decently approximated by $P \approx x^{-1/3}$ over a wide range:

![](/img/FE48C7B9-2B85-4CF5-AD7E-BE6190F97836.png){:width="400 px" class="image-centered"}

{:.caption}
**Fig:** plot of $\log P(\text{first round winner wins})$ vs $\log N.$

```python
import numpy as np

def round(N):  # note: this name shadows the builtin round()
    # First heat: N independent standard normal times.
    data = [np.random.normal() for _ in range(N)]
    first_win = data.index(min(data))
    # Second heat: add an independent normal time to each racer's total.
    for i in range(N):
        data[i] += np.random.normal()
    overall_win = data.index(min(data))
    # Did the first-heat winner also win overall?
    if first_win == overall_win:
        return 1
    else:
        return 0

domain = range(2, 50, 2)
datapoints = [np.mean([round(N) for _ in range(100000)])
              for N in domain]
```

My attempts at an approximate solution for $N=30$ got the right shape, but took too long to "turn up". Updates forthcoming if I make progress.

<br>
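As a sanity check on the $N=2$ answer (my addition, not part of the original argument): writing $f$ and $F$ for the density and CDF of the symmetric gap distribution, the win probability follows in closed form:

$$
P_2 = \frac{P(\Delta T_2 < -\Delta T_1,\ \Delta T_1 < 0)}{P(\Delta T_1 < 0)}
    = 2\int_{-\infty}^{0} f(x)\,F(-x)\,dx
    = 2\int_{0}^{\infty} f(u)\,F(u)\,du
    = \Big[F(u)^2\Big]_{0}^{\infty}
    = 1 - \tfrac{1}{4} = \tfrac{3}{4},
$$

where the substitution $u = -x$ uses the symmetry of $f$, and the last steps use $\tfrac{d}{du}F(u)^2 = 2 f(u) F(u)$ and $F(0) = \tfrac{1}{2}.$ This agrees with the $0.75$ found above and holds for any symmetric gap distribution, not just the Gaussian.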
52.269231
493
0.712288
eng_Latn
0.998129
dad5320256bf03f49b340baee2cbdce9a73f735c
83
md
Markdown
README.md
xhp/android-library
0c08cceb551455253db2efe7dd6976bfd2af10d6
[ "MIT" ]
null
null
null
README.md
xhp/android-library
0c08cceb551455253db2efe7dd6976bfd2af10d6
[ "MIT" ]
null
null
null
README.md
xhp/android-library
0c08cceb551455253db2efe7dd6976bfd2af10d6
[ "MIT" ]
null
null
null
# android-library

Android library dependencies for Android Studio Gradle projects.

# list
10.375
52
0.759036
eng_Latn
0.356631
dad5b10ff353ccf29b6fecbdaab7a4179fe98928
31
md
Markdown
firstcontributions.md
ctlctl/ncov-report
1958445fca5f4d5f4537d0bc02414a21063b87e8
[ "Apache-2.0" ]
1
2021-08-12T15:07:08.000Z
2021-08-12T15:07:08.000Z
firstcontributions.md
ctlctl/ncov-report
1958445fca5f4d5f4537d0bc02414a21063b87e8
[ "Apache-2.0" ]
null
null
null
firstcontributions.md
ctlctl/ncov-report
1958445fca5f4d5f4537d0bc02414a21063b87e8
[ "Apache-2.0" ]
null
null
null
my first commit to cov-report
15.5
30
0.774194
eng_Latn
0.999998
dad6386fef1e433c91d22e2fc0c8d390b024f501
88
md
Markdown
LeetCode2020April/week3/src/main/kotlin/net/twisterrob/challenges/leetcode2020april/week3/valid_parenthesis/README.md
TWiStErRob/TWiStErRob1
83fde3a03ea2616f73fe2ffb43aed5631b31987e
[ "Unlicense" ]
9
2016-09-11T05:52:24.000Z
2021-05-31T21:35:11.000Z
LeetCode2020April/week3/src/main/kotlin/net/twisterrob/challenges/leetcode2020april/week3/valid_parenthesis/README.md
TWiStErRob/TWiStErRob
c047f0fdc2db7da61a5a00132acee4ada34434b0
[ "Unlicense" ]
139
2016-01-28T18:25:44.000Z
2022-01-02T18:15:51.000Z
LeetCode2020April/week3/src/main/kotlin/net/twisterrob/challenges/leetcode2020april/week3/valid_parenthesis/README.md
TWiStErRob/TWiStErRob1
83fde3a03ea2616f73fe2ffb43aed5631b31987e
[ "Unlicense" ]
2
2016-05-13T02:29:11.000Z
2018-10-30T15:43:39.000Z
https://leetcode.com/explore/featured/card/30-day-leetcoding-challenge/530/week-3/3301/
44
87
0.806818
kor_Hang
0.16999
dad644bd881afbaeda6a1f1043dce16b69f6a797
1,665
md
Markdown
CHANGELOG.md
axelson/elixir
accadf541c2045844b0c399adae22476840b3602
[ "Apache-2.0" ]
null
null
null
CHANGELOG.md
axelson/elixir
accadf541c2045844b0c399adae22476840b3602
[ "Apache-2.0" ]
1
2019-04-25T12:52:49.000Z
2019-04-25T13:27:31.000Z
CHANGELOG.md
fertapric/elixir
9df2216670493aa30f37681cc812f3192adfe55a
[ "Apache-2.0" ]
null
null
null
# Changelog for Elixir v1.10

## v1.10.0-dev

### 1. Enhancements

#### Elixir

* [Code] Add `:token_metadata` and `:literal_encoder` support to `Code.string_to_quoted/2`
* [DateTime] Add `DateTime.now!/2` and `DateTime.shift_zone!/3`
* [String] Update to Unicode 12.1

#### Mix

* [mix deps.unlock] Add `--check-unused` flag

### 2. Bug fixes

#### IEx

* [IEx] Exit IEx session if the group leader exits

#### Mix

* [Mix.Project] Make sure `MIX_BUILD_PATH` specifies only the `build_path` prefix and that env+target are still concatenated

### 3. Soft-deprecations (no warnings emitted)

### 4. Hard-deprecations

#### Elixir

* [Code] `Code.load_file/2` has been deprecated in favor of `Code.require_file/2` or `Code.compile_file/2`
* [Code] `Code.loaded_files/0` and `Code.unload_file/1` have been deprecated in favor of `Code.required_files/0` and `Code.unrequire_file/1` respectively
* [String] `String.normalize/2` has been deprecated in favor of `:unicode.characters_to_nfc_binary/1` or `:unicode.characters_to_nfd_binary/1` which ship as part of Erlang/OTP 20+
* [Supervisor] The `Supervisor.Spec` module and its functions have been deprecated in favor of the new Supervisor child specification
* [Supervisor] The `:simple_one_for_one` strategy in `Supervisor` has been deprecated in favor of `DynamicSupervisor`

#### Logger

* [Logger] `:compile_time_purge_level` application environment configuration has been deprecated in favor of the more general `:compile_time_purge_matching` config

## v1.9

The CHANGELOG for v1.9 releases can be found [in the v1.9 branch](https://github.com/elixir-lang/elixir/blob/v1.9/CHANGELOG.md).
36.195652
181
0.733333
eng_Latn
0.880377
dad6eac8f87692a8c3865f28e1bd0642ab6d5890
4,212
md
Markdown
docs/src/main/paradox/user/lang/javascript/gettingstarted.md
dwijnand/cloudstate
f572eb88a7251565e29110aa0af1919dd989c79e
[ "Apache-2.0" ]
4
2019-10-21T15:52:42.000Z
2021-11-17T10:35:48.000Z
docs/src/main/paradox/user/lang/javascript/gettingstarted.md
dwijnand/cloudstate
f572eb88a7251565e29110aa0af1919dd989c79e
[ "Apache-2.0" ]
null
null
null
docs/src/main/paradox/user/lang/javascript/gettingstarted.md
dwijnand/cloudstate
f572eb88a7251565e29110aa0af1919dd989c79e
[ "Apache-2.0" ]
1
2020-05-09T00:18:20.000Z
2020-05-09T00:18:20.000Z
# Getting started with stateful services in JavaScript

## Prerequisites

Node version
: Cloudstate uses the [grpc node package](https://github.com/grpc/grpc-node), which compiles a native gRPC implementation using node-gyp. It requires a minimum of node 4.0.

Build tool
: Cloudstate doesn't require any particular build tool, though npm install scripts do need to be run.

protoc
: Cloudstate requires using the protoc compiler to serialize your protobuf definitions into the protobuf binary descriptor format. Helper scripts are provided to automatically download the protoc compiler for your platform and do this compilation.

docker
: Cloudstate runs in Kubernetes with [Docker], hence you will need Docker to build a container that you can deploy to Kubernetes.

Once you have the above, you need to add the `cloudstate` package to your project, which can be done by running:

```
npm install cloudstate --save
```

## Generating the protobuf descriptor set

A Cloudstate user function needs to report to the Cloudstate proxy the gRPC descriptor that it's serving, serialized to binary using the Protobuf [`FileDescriptorSet` message type](https://developers.google.com/protocol-buffers/docs/techniques#self-description). While there is some support for this in the [protobufjs](https://www.npmjs.com/package/protobufjs) library that is used by Cloudstate to load descriptors, it is somewhat incomplete and buggy. Hence, Cloudstate requires that you precompile this descriptor using `protoc` instead.

Cloudstate provides a utility that does this for you, downloading the `protoc` binary for your platform, and running it with the necessary arguments and include paths. This can be run manually by running `node_modules/cloudstate/bin/compile-descriptor.js`, or we recommend adding it as a `prestart` script to your npm build:

```json
{
  "scripts": {
    "prestart": "compile-descriptor my-descriptor.proto"
  }
}
```

Multiple protobuf files can be passed; in addition, any arguments accepted by `protoc` can be passed. For example, if you are importing files from other directories, you can add those directories as an include path by adding `-Ipath/to/protobuf/dir`.

By default, the descriptor is written to `user-function.desc`; if you wish to change this, you can set `--descriptor_set_out=my-descriptor.desc`. Note that if you output the descriptor to a different path, you will also need to pass that custom path to the constructor of the `CloudState` class when you go to instantiate it.

## package.json example

A minimal `package.json` for a shopping cart example is shown below:

@@@vars
```json
{
  "name": "shopping-cart",
  "version": "0.1.0",
  "dependencies": {
    "cloudstate": "^$cloudstate.node-support.version$"
  },
  "scripts": {
    "prestart": "compile-descriptor shoppingcart.proto",
    "start": "node index.js"
  }
}
```
@@@

## Protobuf files

You can place protobuf files in your project wherever you like, for example, in the root directory, or in a directory named `protos`. In the `package.json` above we've placed the shopping cart application example shown earlier in @ref:[gRPC descriptors](../../features/grpc.md) in a file in the root folder called `shoppingcart.proto`.

## Creating and starting a server

There are two ways to create and start a Cloudstate gRPC server. The first is to create an @extref:[`Entity`](jsdoc:Entity.html), and invoke @extref:[`start`](jsdoc:Entity.html#start) on it. This allows creating a server that serves a single entity, with the default options. We'll look at this more in the subsequent pages.

Alternatively, you can use the @extref:[`CloudState`](jsdoc:CloudState.html) class, add one or more entities to it, and then invoke @extref:[`start`](jsdoc:CloudState.html#start), like so:

@@snip [index.js](/docs/src/test/js/test/gettingstarted/index.js) { #start }

If you created your protobuf file descriptor set at a different location to the default of `user-function.desc`, you can configure that here:

@@snip [index.js](/docs/src/test/js/test/gettingstarted/index.js) { #custom-desc }

For the full range of options available on the `CloudState` class, see @extref:[`CloudState~options`](jsdoc:CloudState.html#~options).
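Since the @@snip includes above pull code from an external file that is not shown here, below is a rough sketch of what such an entry point could look like. This is an illustration only: the `CloudState` and `Entity` classes and the `start` method are named in the text above, but the `addEntity` method name, the `Entity` constructor arguments, and the service name shown are assumptions that should be checked against the linked JSDoc.

```js
// Hypothetical index.js sketch, not the official snippet from the docs.
// Constructor arguments and the addEntity method name are assumptions.
const { CloudState, Entity } = require("cloudstate");

// An entity serving the gRPC service defined in shoppingcart.proto.
// The fully qualified service name must match the one in your .proto file.
const entity = new Entity(
  ["shoppingcart.proto"],
  "com.example.shoppingcart.ShoppingCart"
);

const server = new CloudState();
server.addEntity(entity);
server.start();
```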
54
540
0.765195
eng_Latn
0.991874
dad745a4c610f822e2471526dc8eb9fa5761c925
3,072
md
Markdown
README.md
RomainVernoux/svg1k
00c890f77714a872cff001daf7c244b4831fccb3
[ "CC0-1.0" ]
9
2020-06-13T07:35:46.000Z
2020-08-11T23:12:28.000Z
README.md
RomainVernoux/svg1k
00c890f77714a872cff001daf7c244b4831fccb3
[ "CC0-1.0" ]
6
2020-06-13T11:17:14.000Z
2020-10-14T14:39:05.000Z
README.md
RomainVernoux/svg1k
00c890f77714a872cff001daf7c244b4831fccb3
[ "CC0-1.0" ]
7
2020-06-12T21:23:03.000Z
2020-08-11T23:04:48.000Z
# SVG1K

SVG is an underrated web technology, and the SVG1K project intends to showcase how far it's possible to go when creative creators are limited to 1Ko of data.

If you are a web developer, you should consider contributing to this project; it's a fun way to show your creativity as well as your technical skills while learning new stuff.

Be creative, be smart, show your skills!

https://jeremiepat.github.io/svg1k/

## Contribute

Contribution is super easy if you follow these steps:

1. Create an amazing standalone, non-compressed SVG file that is less than or equal to 1Ko (1024 characters). By standalone we mean it must be understood as a valid SVG file by web browsers; in other words, you don't need to add the xml prologue, but you must add the SVG namespace on the outermost SVG element.
2. Create a pull request against this repository with your SVG file within the `img` folder and the details of your submission within the `js/entries.js` file (the name of your file, your name/pseudo for credits, and an optional link)
3. Wait for review and brag when it's merged.

> **NOTE:** _Be aware that all contributions are made under the terms of [the CC0 1.0 Public Domain license](https://creativecommons.org/publicdomain/zero/1.0/)_

## FAQ

### I don't understand Git, can I contribute without making a PR?

Of course. Making a GitHub PR is the easiest and fastest way for us to review and provide feedback, but it isn't a hard requirement. Feel free to create your kick-ass SVG demo anywhere you want (for example, [codepen.io](https://codepen.io/), [dabblet](https://dabblet.com/), [jsfiddle](https://jsfiddle.net/), etc.), then, if you have no one around you who could handle the PR for you, feel free to ping @JeremiePat (either by opening an issue, on twitter, or by e-mail) to get your submission included.

### I'm new at SVG, will my submission be featured on the site?

Whatever your knowledge of SVG, we encourage you to push the boundaries and experiment! While we don't have any formal acceptance criteria, we will showcase the submissions we feel best push the limits of the technology. We can't guarantee your contribution will be accepted, but we will do our best to provide constructive feedback on your PR to help you improve your submission if we see a way to make it even better!

### 1Ko is too small, can I contribute with a larger file?

Currently we want to stick to 1Ko; however, we are considering changing that limit or creating new categories at some point in the future. Please convince us that your submission is so crazy and pushes the limits of the technology so far that we should accept it.

### Can I submit a compressed SVG file (with the extension `.svgz`)?

No; in this context, compression provides no value. One of the intents of the SVG1K project is to be used by web developers as a pedagogic resource to learn new techniques with SVG (either by learning from the demos or by submitting PRs that will be reviewed thoroughly). Because compression is a form of hard obfuscation, it goes against that pedagogic intent.
73.142857
505
0.772786
eng_Latn
0.999371
dad7d87a173923b909e50f3464ef79c5fd2ae9fa
4,390
md
Markdown
docs/reference/menu.md
jbnunn/webots
4f07a74a0631952a7b098e99f0ab92878051e256
[ "Apache-2.0" ]
2
2019-07-12T13:47:44.000Z
2019-08-17T02:53:54.000Z
docs/reference/menu.md
golbh/webots
8e74fb8393d1e3a6540749afc492635c43f1b30f
[ "Apache-2.0" ]
null
null
null
docs/reference/menu.md
golbh/webots
8e74fb8393d1e3a6540749afc492635c43f1b30f
[ "Apache-2.0" ]
1
2019-06-29T06:34:14.000Z
2019-06-29T06:34:14.000Z
# Table of Contents

- [Thanks](thanks.md)
- [Introduction](introduction.md)
- [Nodes and Functions](nodes-and-functions.md)
- [ODE: Open Dynamics Engine](ode-open-dynamics-engine.md)
- [Node Chart](node-chart.md)
- [Nodes and API Functions](nodes-and-api-functions.md)
- [Accelerometer](accelerometer.md)
- [Appearance](appearance.md)
- [Background](background.md)
- [BallJoint](balljoint.md)
- [BallJointParameters](balljointparameters.md)
- [Box](box.md)
- [Brake](brake.md)
- [Camera](camera.md)
- [Capsule](capsule.md)
- [Charger](charger.md)
- [Color](color.md)
- [Compass](compass.md)
- [Cone](cone.md)
- [Connector](connector.md)
- [ContactProperties](contactproperties.md)
- [Coordinate](coordinate.md)
- [Cubemap](cubemap.md)
- [Cylinder](cylinder.md)
- [Damping](damping.md)
- [Device](device.md)
- [DifferentialWheels](differentialwheels.md)
- [DirectionalLight](directionallight.md)
- [Display](display.md)
- [DistanceSensor](distancesensor.md)
- [ElevationGrid](elevationgrid.md)
- [Emitter](emitter.md)
- [Fluid](fluid.md)
- [Focus](focus.md)
- [Fog](fog.md)
- [GPS](gps.md)
- [Group](group.md)
- [Gyro](gyro.md)
- [HingeJoint](hingejoint.md)
- [HingeJointParameters](hingejointparameters.md)
- [Hinge2Joint](hinge2joint.md)
- [ImageTexture](imagetexture.md)
- [ImmersionProperties](immersionproperties.md)
- [IndexedFaceSet](indexedfaceset.md)
- [IndexedLineSet](indexedlineset.md)
- [InertialUnit](inertialunit.md)
- [Joint](joint.md)
- [JointParameters](jointparameters.md)
- [Joystick](joystick.md)
- [Keyboard](keyboard.md)
- [LED](led.md)
- [Lens](lens.md)
- [LensFlare](lensflare.md)
- [Lidar](lidar.md)
- [Light](light.md)
- [LightSensor](lightsensor.md)
- [LinearMotor](linearmotor.md)
- [Material](material.md)
- [Motor](motor.md)
- [Mouse](mouse.md)
- [Muscle](muscle.md)
- [PBRAppearance](pbrappearance.md)
- [Pen](pen.md)
- [Physics](physics.md)
- [Plane](plane.md)
- [PointLight](pointlight.md)
- [PointSet](pointset.md)
- [PositionSensor](positionsensor.md)
- [Propeller](propeller.md)
- [Radar](radar.md)
- [RangeFinder](rangefinder.md)
- [Receiver](receiver.md)
- [Recognition](recognition.md)
- [Robot](robot.md)
- [RotationalMotor](rotationalmotor.md)
- [Shape](shape.md)
- [SliderJoint](sliderjoint.md)
- [Slot](slot.md)
- [Solid](solid.md)
- [SolidReference](solidreference.md)
- [Speaker](speaker.md)
- [Sphere](sphere.md)
- [SpotLight](spotlight.md)
- [Supervisor](supervisor.md)
- [TextureCoordinate](texturecoordinate.md)
- [TextureTransform](texturetransform.md)
- [TouchSensor](touchsensor.md)
- [Track](track.md)
- [TrackWheel](trackwheel.md)
- [Transform](transform.md)
- [Viewpoint](viewpoint.md)
- [WorldInfo](worldinfo.md)
- [Zoom](zoom.md)
- [Motion Functions](motion-functions.md)
- [Motion](motion.md)
- [PROTO](proto.md)
- [PROTO Definition](proto-definition.md)
- [PROTO Instantiation](proto-instantiation.md)
- [PROTO Example](proto-example.md)
- [Procedural PROTO Nodes](procedural-proto-nodes.md)
- [Using PROTO Nodes with the Scene Tree](using-proto-nodes-with-the-scene-tree.md)
- [PROTO Scoping Rules](proto-scoping-rules.md)
- [PROTO Hidden Fields](proto-hidden-fields.md)
- [PROTO Design Guidelines](proto-design-guidelines.md)
- [Physics Plugin](physics-plugin.md)
- [Introduction of the Physics Plugins](introduction-of-the-physics-plugins.md)
- [Plugin Setup](plugin-setup.md)
- [Callback Functions](callback-functions.md)
- [Utility Functions](utility-functions.md)
- [Structure of ODE Objects](structure-of-ode-objects.md)
- [Compiling the Physics Plugin](compiling-the-physics-plugin.md)
- [Examples](examples.md)
- [ODE Improvements](ode-improvements.md)
- [Troubleshooting](troubleshooting.md)
- [Execution Scheme](execution-scheme.md)
- [Webots World Files](webots-world-files.md)
- [Generalities](generalities.md)
- [Nodes and Keywords](nodes-and-keywords.md)
- [DEF and USE](def-and-use.md)
- [Other APIs](other-apis.md)
- [ROS API](ros-api.md)
- [Glossary](glossary.md)
35.12
87
0.650797
yue_Hant
0.754236
dad7dc5164416da0e9747a2601f6b3cb8940546b
1,729
md
Markdown
_posts/2020-01-22-the-presidents-golf-cart.md
kyclark/kyclark.github.io
ff1f08da5586313adb69c39730e3ae2bef065148
[ "MIT" ]
null
null
null
_posts/2020-01-22-the-presidents-golf-cart.md
kyclark/kyclark.github.io
ff1f08da5586313adb69c39730e3ae2bef065148
[ "MIT" ]
1
2021-03-30T03:05:38.000Z
2021-03-30T03:05:38.000Z
_posts/2020-01-22-the-presidents-golf-cart.md
kyclark/kyclark.github.io
ff1f08da5586313adb69c39730e3ae2bef065148
[ "MIT" ]
null
null
null
---
layout: post
title: "The President's Golf Cart"
date: 2020-01-22 14:00:00 -0700
---

Every day I ride my bicycle to work at the University of Arizona. I go right past the University President's house, and his custom golf cart sits out front. I got to thinking about how his golf cart would look if it were built the way most software is designed.

For instance, it's almost literally a straight line from his house to his office in the Old Main building on campus, so I imagine the cart would have no steering wheel. It would probably seat just one person. I doubt it would have brakes or even acceleration. It would just go at one speed when you turned it on, in a straight line, and, when you got to the end, you would turn it off. When the President wanted to go home, he'd have a few strong men pick up the cart and turn it around to point back towards his house. Since it's usually sunny in Tucson, it would probably have no roof or windshield, because those weren't needed on the day the cart was built.

So much of the software I've seen and used seems to be built to solve exactly the problem the original coder had and little else. That means the programs have hard-coded values like paths to input files or databases, or parameters that change the output. There is usually little to no documentation or any attempt to verify that the program would work under various conditions.

I'd like to think that, at the end of reading my book, you would think about how a real golf cart seats more than one person, can go variable speeds and turn in different directions, how it has safety features like a roof and seatbelts, and is a generally useful vehicle that can be easily used by many people for many different purposes.
82.333333
339
0.784268
eng_Latn
0.999979
dad9954d3d830e56f381b3803f3eb4ac8de9583a
61
md
Markdown
docs/content/v2.0/api/ysql/syntax_resources/commands/deallocate.grammar.md
hstenzel/yugabyte-db
b25c8f4d7a9e66d106c41c446b71af870aefa304
[ "Apache-2.0", "CC0-1.0" ]
3,702
2019-09-17T13:49:56.000Z
2022-03-31T21:50:59.000Z
docs/content/v2.0/api/ysql/syntax_resources/commands/deallocate.grammar.md
hstenzel/yugabyte-db
b25c8f4d7a9e66d106c41c446b71af870aefa304
[ "Apache-2.0", "CC0-1.0" ]
9,291
2019-09-16T21:47:07.000Z
2022-03-31T23:52:28.000Z
docs/content/v2.0/api/ysql/syntax_resources/commands/deallocate.grammar.md
hstenzel/yugabyte-db
b25c8f4d7a9e66d106c41c446b71af870aefa304
[ "Apache-2.0", "CC0-1.0" ]
673
2019-09-16T21:27:53.000Z
2022-03-31T22:23:59.000Z
```
deallocate ::= DEALLOCATE [ PREPARE ] { name | ALL }
```
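For context, a prepared statement is typically created, executed, and then released with the statement this grammar describes; the table and statement names below are illustrative only:

```sql
-- Hypothetical table and statement names, shown only to illustrate the syntax.
PREPARE fetch_user (int) AS
  SELECT * FROM users WHERE id = $1;
EXECUTE fetch_user(42);

-- Release one prepared statement by name (PREPARE keyword is optional) ...
DEALLOCATE PREPARE fetch_user;
-- ... or release every prepared statement in the current session.
DEALLOCATE ALL;
```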
15.25
52
0.557377
yue_Hant
0.94615
dad9d9d3a2dcb69f675ff6dda07b7d1ee404aacb
829
md
Markdown
includes/redis-cache-service-limits.md
OpenLocalizationOrg/azuretest1_zh-CN
358681cf517ea1d0b86fff60a5f45d3c4762670d
[ "CC-BY-3.0" ]
null
null
null
includes/redis-cache-service-limits.md
OpenLocalizationOrg/azuretest1_zh-CN
358681cf517ea1d0b86fff60a5f45d3c4762670d
[ "CC-BY-3.0" ]
null
null
null
includes/redis-cache-service-limits.md
OpenLocalizationOrg/azuretest1_zh-CN
358681cf517ea1d0b86fff60a5f45d3c4762670d
[ "CC-BY-3.0" ]
null
null
null
---
ms.openlocfilehash: d5ead77f3d10d0c74577700cd74fbdcf2ffe2730
ms.sourcegitcommit: bab1265d669c3e6871daa7cb8a5640a47104947a
translationtype: MT
---

| Resource | Limit |
|---------------------------------------------|----------------------------------------|
| Cache size | 53 GB |
| Databases | 16 |
| Maximum connected clients | 20,000 |
| Redis cache replicas (for high availability) | 1 |

Azure Redis Cache limits and sizes are different for each pricing tier. To see the pricing tiers and their associated sizes, see [Azure Redis Cache pricing](http://azure.microsoft.com/pricing/details/cache/). For details on Azure Redis Cache configuration limits, see [Default Redis server configuration](redis-cache/cache-configure.md#default-redis-server-configuration).
55.266667
123
0.451146
yue_Hant
0.216005
dada232f592ab027f5351680f77b155d60f9cead
3,922
md
Markdown
docs/content/1-3-32.md
Higurashi-kagome/Algorithms-4th-Edition-in-Csharp
2605b05c77f5bce0e5a7a8dc9d10064af012bc36
[ "MIT" ]
1
2017-05-30T14:44:33.000Z
2017-05-30T14:44:33.000Z
docs/content/1-3-32.md
ikesnowy/Algorithms-4th-Edition
9873140cd5296dd4bc07bc05f0ee339757feddee
[ "MIT" ]
null
null
null
docs/content/1-3-32.md
ikesnowy/Algorithms-4th-Edition
9873140cd5296dd4bc07bc05f0ee339757feddee
[ "MIT" ]
null
null
null
---
title: 1.3.32
tags: Fundamental
weight: 1332
date: 2018-05-18 17:00:36
---

# 1.3.32

## Solution

Starting from a queue, simply add a method that inserts an element at the head of the queue.

## Code

### Steque.cs

```csharp
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;

/// <summary>
/// Steque.
/// </summary>
/// <typeparam name="TItem">The type of the elements stored in the steque.</typeparam>
public class Steque<TItem> : IEnumerable<TItem>
{
    private Node<TItem> _first;
    private Node<TItem> _last;
    private int _count;

    private class Node<T>
    {
        public T Item;
        public Node<T> Next;
    }

    /// <summary>
    /// Default constructor.
    /// </summary>
    public Steque()
    {
        _first = null;
        _count = 0;
    }

    /// <summary>
    /// Checks whether the stack is empty.
    /// </summary>
    /// <returns></returns>
    public bool IsEmpty()
    {
        return _count == 0;
    }

    /// <summary>
    /// Returns the number of elements in the stack.
    /// </summary>
    /// <returns></returns>
    public int Size()
    {
        return _count;
    }

    /// <summary>
    /// Pushes an element onto the stack.
    /// </summary>
    /// <param name="item">The element to push onto the stack.</param>
    public void Push(TItem item)
    {
        var oldFirst = _first;
        _first = new Node<TItem>();
        _first.Item = item;
        _first.Next = oldFirst;
        if (oldFirst == null)
        {
            _last = _first;
        }
        _count++;
    }

    /// <summary>
    /// Pops an element off the stack and returns it.
    /// </summary>
    /// <returns></returns>
    public TItem Pop()
    {
        if (IsEmpty())
            throw new InvalidOperationException("Stack Underflow");
        var item = _first.Item;
        _first = _first.Next;
        _count--;
        if (_count == 0)
        {
            _last = null;
        }
        return item;
    }

    /// <summary>
    /// Adds an element to the end of the queue.
    /// </summary>
    /// <param name="item">The element to enqueue.</param>
    public void Enqueue(TItem item)
    {
        var oldLast = _last;
        _last = new Node<TItem>();
        _last.Item = item;
        _last.Next = null;
        if (IsEmpty())
            _first = _last;
        else
            oldLast.Next = _last;
        _count++;
    }

    /// <summary>
    /// Returns the top element (without popping it).
    /// </summary>
    /// <returns></returns>
    public TItem Peek()
    {
        if (IsEmpty())
            throw new InvalidOperationException("Stack Underflow");
        return _first.Item;
    }

    public override string ToString()
    {
        var s = new StringBuilder();
        foreach (var n in this)
        {
            s.Append(n);
            s.Append(' ');
        }
        return s.ToString();
    }

    public IEnumerator<TItem> GetEnumerator()
    {
        return new StackEnumerator(_first);
    }

    IEnumerator IEnumerable.GetEnumerator()
    {
        return GetEnumerator();
    }

    private class StackEnumerator : IEnumerator<TItem>
    {
        private Node<TItem> _current;
        private Node<TItem> _first;

        public StackEnumerator(Node<TItem> first)
        {
            _current = new Node<TItem>();
            _current.Next = first;
            _first = _current;
        }

        TItem IEnumerator<TItem>.Current => _current.Item;
        object IEnumerator.Current => _current.Item;

        void IDisposable.Dispose()
        {
            _current = null;
            _first = null;
        }

        bool IEnumerator.MoveNext()
        {
            if (_current.Next == null)
                return false;
            _current = _current.Next;
            return true;
        }

        void IEnumerator.Reset()
        {
            _current = _first;
        }
    }
}
```

### program.cs

```csharp
// See Steque.cs.
var steque = new Steque<string>();
steque.Push("first");
steque.Push("second");
steque.Push("third");
steque.Enqueue("fourth");
Console.WriteLine(steque.ToString());
steque.Pop();
steque.Pop();
steque.Pop();
steque.Pop();
Console.WriteLine(steque.ToString());
steque.Enqueue("first");
steque.Push("zero");
Console.WriteLine(steque.ToString());
```
19.038835
67
0.516318
kor_Hang
0.335686
dada6bd6f01c5e3d9bbc3b107be10b0bacbbca14
1,285
md
Markdown
_posts/2021-02-10-科研工作习惯.md
CryoECNU/CryoECNU.github.io
0572e44fb5819a3a27f8e56bba2273131351af99
[ "Apache-2.0" ]
null
null
null
_posts/2021-02-10-科研工作习惯.md
CryoECNU/CryoECNU.github.io
0572e44fb5819a3a27f8e56bba2273131351af99
[ "Apache-2.0" ]
null
null
null
_posts/2021-02-10-科研工作习惯.md
CryoECNU/CryoECNU.github.io
0572e44fb5819a3a27f8e56bba2273131351af99
[ "Apache-2.0" ]
null
null
null
---
layout: post
title: Research work habits worth building
date: 2021-02-09
categories: blog
description:
header-img: img/top.png # background image for this post's title
---

## Research work habits worth building

Research projects usually run for a long time and require several people to work together, so building good working habits becomes a necessity. These habits span many aspects and many details, such as document management, data management, and so on. I will collect them here one by one and update the list as more come to mind.

### Data management

1. Always keep the raw data safe, including its documentation! It is best to archive it independently in at least two different computers or other storage locations; never put your trust in any single device, whether an external hard drive, a desktop, a laptop, or network storage.
2. Within each relatively independent study, the data can be stored under that project's working directory. Above all, keep every piece of code that preprocesses the raw data, so that later you can trace exactly how it was processed and computed. Code comments are of course required: if they cannot be exhaustive, they must at least mark the important steps.

### Computation/analysis and plotting

Keep the computation/analysis and the plotting as separate as possible. That is, the analysis outputs result files (a pile of result data), and the plotting only visualizes those outputs. The point is to avoid recomputing the data over and over during the plotting stage, which wastes computation time and complicates the programs. Keeping the two independent sharpens the focus of each: analysis produces the results, and plotting turns the data into good-looking figures. (A minimal sketch of this split appears at the end of this post.)

A draft figure only has to make its point clearly, but a figure destined for the final paper needs repeated polishing: colors, line widths, fonts, legends, color bars, and other details. The art of color deserves its own write-up later.

### Naming conventions

Use a few simple keywords as the body of a file name, so that a single glance tells you what the file, including a program, is for. For example, STEP01\_Calculation\_ALT\_from\_soil\_temp.py is obviously the program that computes active layer thickness from soil temperature.

For manuscripts and other documents under revision, it is best to add the date as a marker, which nicely preserves the revision trail. Doing this on GitHub is not impossible, but Word documents and the like are awkward to handle in Git, so a date marker in the file name is the safer choice — for example, MS\_PF\_Hydrological\_Impacts\_20200901.docx.

When several people collaborate, you often also need to add the author's initials, for example MS\_PF\_Hydrological\_Impacts\_20200901\_KW.docx.

### Literature management

Everything you have read, skimmed, or even just glanced at is best entered into your [<span style="color:red">**reference manager**</span>](https://cryoecnu.github.io/xtech/). For papers you actually read, it is best to attach the PDF as well.

Organize the papers you publish yourself, ideally bundling the data, the analysis, and the plotting together with them. Within any given project, it is best to keep a References folder in the working directory that holds the important, core references.
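To make the computation/plotting split concrete, here is a minimal sketch of the idea in Python. The file names, the `.npz` results format, and the stand-in "ALT from soil temperature" calculation are all illustrative placeholders, not part of any real project:

```python
# analyze.py -- compute once, write the results to disk (all names illustrative).
import numpy as np

soil_temp = np.loadtxt("data/soil_temp.csv", delimiter=",")  # preprocessed input
alt = soil_temp.max(axis=0)          # stand-in for the real ALT calculation
np.savez("results/alt_20200901.npz", alt=alt)

# plot.py -- read the saved results and only draw; no recomputation here.
import matplotlib.pyplot as plt

data = np.load("results/alt_20200901.npz")
plt.plot(data["alt"])
plt.xlabel("Site index")
plt.ylabel("Active layer thickness (m)")
plt.savefig("figures/alt_20200901.png", dpi=300)
```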
31.341463
153
0.831128
yue_Hant
0.768618
dada6f29ebc788d396154904a4aa82f84f58058c
668
md
Markdown
packages/demux/CHANGELOG.md
danielAlvess/aloxide
c926af1eb2601e389237c21d77a3e756bbd4ad07
[ "Apache-2.0" ]
null
null
null
packages/demux/CHANGELOG.md
danielAlvess/aloxide
c926af1eb2601e389237c21d77a3e756bbd4ad07
[ "Apache-2.0" ]
null
null
null
packages/demux/CHANGELOG.md
danielAlvess/aloxide
c926af1eb2601e389237c21d77a3e756bbd4ad07
[ "Apache-2.0" ]
null
null
null
# Change Log

All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

# 0.1.0 (2020-09-03)

### Bug Fixes

* **app:** :lipstick: fix dependency ([8a9661a](https://github.com/lecle/aloxide/commit/8a9661a99d5d6e3598eea1629e7223b9ba60250d))
* **demux:** :bug: initialize started block with the latest processed block ([0d24040](https://github.com/lecle/aloxide/commit/0d24040f6717a3c649d962277d96535797c470c7))

### Features

* **app:** :zap: add dummy data for testing server ([4a37ed3](https://github.com/lecle/aloxide/commit/4a37ed3b23b954ddffd6e45c17cc908533979fa6))
37.111111
170
0.767964
eng_Latn
0.479103
dadb51b46774734361844e2d749e7332d4464aaf
74
md
Markdown
README.md
georgepestell/CNN
fa68b0f253b07b6e773587e2003b717de946af0b
[ "MIT" ]
null
null
null
README.md
georgepestell/CNN
fa68b0f253b07b6e773587e2003b717de946af0b
[ "MIT" ]
null
null
null
README.md
georgepestell/CNN
fa68b0f253b07b6e773587e2003b717de946af0b
[ "MIT" ]
null
null
null
# CNN

Looking at understanding and creating Convolutional Neural Networks
24.666667
67
0.851351
eng_Latn
0.995303
dadbe55ba443a3413ec68a139cb4f8653ec1dd90
3,276
md
Markdown
articles/azure-web-pubsub/reference-server-sdk-csharp.md
BaherAbdullah/azure-docs
65d82440dd3209697fdb983ef456b0a2293e270a
[ "CC-BY-4.0", "MIT" ]
null
null
null
articles/azure-web-pubsub/reference-server-sdk-csharp.md
BaherAbdullah/azure-docs
65d82440dd3209697fdb983ef456b0a2293e270a
[ "CC-BY-4.0", "MIT" ]
null
null
null
articles/azure-web-pubsub/reference-server-sdk-csharp.md
BaherAbdullah/azure-docs
65d82440dd3209697fdb983ef456b0a2293e270a
[ "CC-BY-4.0", "MIT" ]
null
null
null
---
title: Reference - .NET server SDK for Azure Web PubSub service
description: The reference describes the .NET server SDK for Azure Web PubSub service
author: vicancy
ms.author: lianwei
ms.service: azure-web-pubsub
ms.topic: conceptual
ms.date: 08/26/2021
---

# .NET server SDK for Azure Web PubSub service

This library can be used to do the following actions. Details about the terms used here are described in the [Key concepts](#key-concepts) section.

- Send messages to hubs and groups.
- Send messages to particular users and connections.
- Organize users and connections into groups.
- Close connections.
- Grant, revoke, and check permissions for an existing connection.

[Source code][code] | [Package][package] | [API reference documentation][api] | [Product documentation](https://aka.ms/awps/doc) | [Samples][samples_ref]

## Getting started

### Install the package

Install the client library from [NuGet](https://www.nuget.org/):

```PowerShell
dotnet add package Azure.Messaging.WebPubSub --prerelease
```

### Prerequisites

- An [Azure subscription][azure_sub].
- An existing Azure Web PubSub service instance.

### Authenticate the client

In order to interact with the service, you'll need to create an instance of the WebPubSubServiceClient class. To make this possible, you'll need the connection string or a key, which you can access in the Azure portal.

### Create a `WebPubSubServiceClient`

```csharp
var serviceClient = new WebPubSubServiceClient(new Uri("<endpoint>"), "<hub>", new AzureKeyCredential("<access-key>"));
```

## Key concepts

[!INCLUDE [Terms](includes/terms.md)]

## Examples

### Broadcast a text message to all clients

```C# Snippet:WebPubSubHelloWorld
var serviceClient = new WebPubSubServiceClient(new Uri(endpoint), "some_hub", new AzureKeyCredential(key));

serviceClient.SendToAll("Hello World!");
```

### Broadcast a JSON message to all clients

```C# Snippet:WebPubSubSendJson
var serviceClient = new WebPubSubServiceClient(new Uri(endpoint), "some_hub", new AzureKeyCredential(key));

serviceClient.SendToAll(RequestContent.Create(
    new
    {
        Foo = "Hello World!",
        Bar = 42
    }),
    ContentType.ApplicationJson);
```

### Broadcast a binary message to all clients

```C# Snippet:WebPubSubSendBinary
var serviceClient = new WebPubSubServiceClient(new Uri(endpoint), "some_hub", new AzureKeyCredential(key));

Stream stream = BinaryData.FromString("Hello World!").ToStream();
serviceClient.SendToAll(RequestContent.Create(stream), ContentType.ApplicationOctetStream);
```

## Troubleshooting

### Setting up console logging

You can also easily [enable console logging](https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/core/Azure.Core/samples/Diagnostics.md#logging) if you want to dig deeper into the requests you're making against the service.

[azure_sub]: https://azure.microsoft.com/free/
[samples_ref]: https://github.com/Azure/azure-webpubsub/tree/main/samples/csharp
[code]: https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/webpubsub/Azure.Messaging.WebPubSub/src
[package]: https://www.nuget.org/packages/Azure.Messaging.WebPubSub
[api]: /dotnet/api/azure.messaging.webpubsub

## Next steps

[!INCLUDE [next step](includes/include-next-step.md)]
32.117647
230
0.751832
eng_Latn
0.467431
dadc14ab0cf9b4aca29fb638a4d3187611816d8d
330
md
Markdown
tags/java/paquete/java.security.md
w3api/w3api
681462ece7265723031a88bec5285209d0e125bf
[ "MIT" ]
1
2021-09-15T20:32:10.000Z
2021-09-15T20:32:10.000Z
tags/java/paquete/java.security.md
w3api/w3api
681462ece7265723031a88bec5285209d0e125bf
[ "MIT" ]
20
2021-01-17T01:13:46.000Z
2021-06-20T21:16:02.000Z
tags/java/paquete/java.security.md
w3api/w3api
681462ece7265723031a88bec5285209d0e125bf
[ "MIT" ]
2
2021-09-15T20:32:08.000Z
2022-02-20T16:57:46.000Z
--- title: "Paquete java.security" layout: tag permalink: /Java/tag/java.security/ date: 2020-12-29 key: Java.quetejava.security sidebar: nav: java aside: toc: true pagination: enabled: true tag: "java.security" permalink: /:num/ --- <h2>Elementos</h2> Todos los elementos del paquete <strong>java.security</strong>
17.368421
62
0.712121
hun_Latn
0.180017
dadc3683de5d1dca5b438dad570147911bfce7f3
246
md
Markdown
vendor/github.com/inconshreveable/go-update/internal/binarydist/Readme.md
brandon-bailey/tfsec
76ccc97ac4afe1246d471c3fea021354f90f61b4
[ "MIT" ]
12,677
2016-06-21T12:51:13.000Z
2022-03-31T23:58:01.000Z
src/github.com/kr/binarydist/Readme.md
linnoreading/lantern
e46cebcc957d27f283552dd4bef1a84739c8c1ed
[ "BSD-2-Clause" ]
3,710
2015-01-01T01:03:57.000Z
2021-09-04T16:18:03.000Z
src/github.com/kr/binarydist/Readme.md
linnoreading/lantern
e46cebcc957d27f283552dd4bef1a84739c8c1ed
[ "BSD-2-Clause" ]
623
2015-01-02T05:03:43.000Z
2022-03-15T17:35:21.000Z
# binarydist

Package binarydist implements binary diff and patch as described on
<http://www.daemonology.net/bsdiff/>. It reads and writes files
compatible with the tools there.

Documentation at <http://go.pkgdoc.org/github.com/kr/binarydist>.
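As an illustration, producing a patch between two builds might look like the following sketch in Go. It assumes the package's `Diff(old, new io.Reader, patch io.Writer) error` entry point described in the linked documentation, and the file names are placeholders; the complementary `Patch` function applies the resulting patch.

```go
package main

import (
	"log"
	"os"

	"github.com/kr/binarydist"
)

func main() {
	// Hypothetical file names; any two binaries will do.
	oldFile, err := os.Open("app-v1")
	if err != nil {
		log.Fatal(err)
	}
	defer oldFile.Close()

	newFile, err := os.Open("app-v2")
	if err != nil {
		log.Fatal(err)
	}
	defer newFile.Close()

	patch, err := os.Create("app.patch")
	if err != nil {
		log.Fatal(err)
	}
	defer patch.Close()

	// Write a bsdiff-format patch that transforms app-v1 into app-v2.
	if err := binarydist.Diff(oldFile, newFile, patch); err != nil {
		log.Fatal(err)
	}
}
```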
30.75
67
0.788618
eng_Latn
0.951526
dadca455bdd78bce118eb9140789a88cfd07088d
3,513
md
Markdown
README.md
pardjs/starter
ab5674952e68f98859e38f7f3f6ee4ea7da11906
[ "MIT" ]
null
null
null
README.md
pardjs/starter
ab5674952e68f98859e38f7f3f6ee4ea7da11906
[ "MIT" ]
null
null
null
README.md
pardjs/starter
ab5674952e68f98859e38f7f3f6ee4ea7da11906
[ "MIT" ]
null
null
null
<!-- -->

# Pard.js starter

This project will be the parent of the real restful api projects.

[![Coverage Status](https://coveralls.io/repos/github/pardjs/pardjs-starter/badge.svg)](https://coveralls.io/github/pardjs/pardjs-starter)

- [x] Use `@pardjs/common` customized components.
- [x] Travis CI
- [x] Test suite
- [x] Code linting
- [x] Commit regulate.
- [x] API docs by `@nestjs/swagger`
- [x] Project docs `@compodoc/compodoc`
- [ ] Automated release.
- [ ] Use `@pardjs/health-check-service`
- [ ] Use `@pardjs/users-service`
- [ ] typeorm migrations
- [ ] psql docker-compose.yml for development environment.
- [ ] ansible config
- [ ] Dockerfile
- [ ] docker-compose.yml for service
- [ ] APM

---

> Content below comes from `github.com/nestjs/typescript-starter`

<p align="center">
  <a href="http://nestjs.com/" target="blank"><img src="https://nestjs.com/img/logo_text.svg" width="320" alt="Nest Logo" /></a>
</p>

[travis-image]: https://api.travis-ci.org/nestjs/nest.svg?branch=master
[travis-url]: https://travis-ci.org/nestjs/nest
[linux-image]: https://img.shields.io/travis/nestjs/nest/master.svg?label=linux
[linux-url]: https://travis-ci.org/nestjs/nest

<p align="center">A progressive <a href="http://nodejs.org" target="blank">Node.js</a> framework for building efficient and scalable server-side applications, heavily inspired by <a href="https://angular.io" target="blank">Angular</a>.</p>

<p align="center">
  <a href="https://www.npmjs.com/~nestjscore"><img src="https://img.shields.io/npm/v/@nestjs/core.svg" alt="NPM Version" /></a>
  <a href="https://www.npmjs.com/~nestjscore"><img src="https://img.shields.io/npm/l/@nestjs/core.svg" alt="Package License" /></a>
  <a href="https://www.npmjs.com/~nestjscore"><img src="https://img.shields.io/npm/dm/@nestjs/core.svg" alt="NPM Downloads" /></a>
  <a href="https://travis-ci.org/nestjs/nest"><img src="https://api.travis-ci.org/nestjs/nest.svg?branch=master" alt="Travis" /></a>
  <a href="https://travis-ci.org/nestjs/nest"><img src="https://img.shields.io/travis/nestjs/nest/master.svg?label=linux" alt="Linux" /></a>
  <a href="https://coveralls.io/github/nestjs/nest?branch=master"><img src="https://coveralls.io/repos/github/nestjs/nest/badge.svg?branch=master#5" alt="Coverage" /></a>
  <a href="https://gitter.im/nestjs/nestjs?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=body_badge"><img src="https://badges.gitter.im/nestjs/nestjs.svg" alt="Gitter" /></a>
  <a href="https://opencollective.com/nest#backer"><img src="https://opencollective.com/nest/backers/badge.svg" alt="Backers on Open Collective" /></a>
  <a href="https://opencollective.com/nest#sponsor"><img src="https://opencollective.com/nest/sponsors/badge.svg" alt="Sponsors on Open Collective" /></a>
  <a href="https://paypal.me/kamilmysliwiec"><img src="https://img.shields.io/badge/Donate-PayPal-dc3d53.svg"/></a>
  <a href="https://twitter.com/nestframework"><img src="https://img.shields.io/twitter/follow/nestframework.svg?style=social&label=Follow"></a>
</p>

## Description

[Nest](https://github.com/nestjs/nest) framework TypeScript starter repository.

## Installation

```bash
npm install
```

## Running the app

```bash
# development
$ npm run start

# watch mode
$ npm run start:dev

# production mode
$ npm run start:prod
```

## Test

```bash
# unit tests
$ npm run test

# e2e tests
$ npm run test:e2e

# test coverage
$ npm run test:cov
```

## License

Nest is [MIT licensed](LICENSE).
30.284483
143
0.69741
yue_Hant
0.287914
dadd27b20a576a0a72f124d439d1020e00a9c3bc
313
md
Markdown
collections/_episodes/C&G35.md
alonshvartsman/bitcoin-resources.github.io
11dc1e61b70f14f85124b8a938e345ae6b953bb4
[ "MIT" ]
1
2015-12-13T09:09:57.000Z
2015-12-13T09:09:57.000Z
collections/_episodes/C&G35.md
alonshvartsman/bitcoin-resources.github.io
11dc1e61b70f14f85124b8a938e345ae6b953bb4
[ "MIT" ]
6
2015-12-12T22:58:11.000Z
2021-09-28T05:35:00.000Z
collections/_episodes/C&G35.md
alonshvartsman/bitcoin-resources.github.io
11dc1e61b70f14f85124b8a938e345ae6b953bb4
[ "MIT" ]
6
2015-12-05T15:19:27.000Z
2015-12-15T12:06:43.000Z
---
layout: page
title: Jameson Lopp on Privacy, Security, and Personal Sovereignty
podcast: C&G
episode: 35
hosts: CryptoDantes and Stigofthepump
date: Aug 7, 2019
guest: Jameson Lopp
lesson: ['19']
link: https://cryptoandgrill.podbean.com/e/bitcoin-privacy-security-and-personal-sovereignty-w-jameson-lopp/
---
26.083333
108
0.773163
eng_Latn
0.344633
daddd4b9763b6350ba4eef290666a21266b76c1f
827
md
Markdown
javascript/web-kernel.md
greedbell/blog
525407372163d775db2c0d8adb433ad489069b35
[ "MIT" ]
21
2017-03-02T03:15:59.000Z
2022-01-04T07:08:00.000Z
javascript/web-kernel.md
greedbell/blog
525407372163d775db2c0d8adb433ad489069b35
[ "MIT" ]
null
null
null
javascript/web-kernel.md
greedbell/blog
525407372163d775db2c0d8adb433ad489069b35
[ "MIT" ]
3
2017-04-27T09:12:36.000Z
2019-05-06T16:16:37.000Z
# Web engines

* [MDN comparison of JS engines](https://developer.mozilla.org/zh-CN/docs/Mozilla/Projects/SpiderMonkey/Comparision_of_JS_engines)
* [wiki](https://en.wikipedia.org/wiki/JavaScript_engine)
* [v8](http://v8project.blogspot.hk/)
* [apple javascriptcore](https://developer.apple.com/reference/javascriptcore)
* [javascriptcore](http://trac.webkit.org/wiki/JavaScriptCore)
* [Introduction to mainstream browser engines (browser-engine history worth knowing for front-end developers)](http://web.jobbole.com/84826/)

A browser engine can be split into two parts: the rendering engine (layout engine) and the JS engine.

* The rendering engine fetches the page content (HTML, XML, images, and so on), assembles the information (for example, applying CSS), computes how the page should be displayed, and then outputs the result to the screen or printer.
* The JS engine parses and executes JavaScript to produce the page's dynamic behavior.

## Engines

* Gecko (Firefox)
* Trident (IE)
* WebKit (Safari, Chrome, etc.): composed of the WebCore rendering engine and the JSCore JS engine
* Presto (formerly Opera)

## Rendering engines

* WebCore
* Blink (Chrome): a fork of WebKit

## JavaScript engines

* KJS
* JSCore
* V8
25.84375
113
0.748489
yue_Hant
0.665686
dadeaa71fa75164ca82c0b48a12f67b3a61ad2b1
854
md
Markdown
README.md
vpaladino778/pybind11_examples
156f4a0ef9d9a5b1683137e920a724cb1d056a5d
[ "MIT" ]
1
2019-09-25T15:35:47.000Z
2019-09-25T15:35:47.000Z
README.md
vpaladino778/pybind11_examples
156f4a0ef9d9a5b1683137e920a724cb1d056a5d
[ "MIT" ]
null
null
null
README.md
vpaladino778/pybind11_examples
156f4a0ef9d9a5b1683137e920a724cb1d056a5d
[ "MIT" ]
null
null
null
# pybind11_examples

[Pybind11](https://github.com/pybind/pybind11) is an extremely useful header-only library for bridging code between Python and C++. The purpose of this repository is to provide clean, well documented examples for solutions to specific problems that you might encounter during your development with pybind11.

## Building Examples

### Windows

#### Pre-requisites

* [Microsoft Visual Studio 2015](https://visualstudio.microsoft.com/downloads/)
* [CMake](https://cmake.org/download/) - Select `Windows win64-x86 Installer`
* Make
* git

Make sure make, cmake, and git are all added to your system's PATH.

#### Build Commands

Build commands are included in the README of each example's folder.

### Linux

```bash
sudo apt install build-essential cmake make git
```

#### Pre-requisites

#### Build Commands

## Contribute
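## Minimal example

For a taste of what the examples in this repository build on, a minimal binding might look like the following. This is an illustrative sketch rather than one of the repository's own examples; the module and function names are placeholders:

```cpp
// example.cpp -- a minimal, illustrative pybind11 module.
#include <pybind11/pybind11.h>

// A plain C++ function we want to call from Python.
int add(int a, int b) {
    return a + b;
}

// Expose the function as the Python module `example`.
PYBIND11_MODULE(example, m) {
    m.doc() = "Minimal pybind11 example";
    m.def("add", &add, "Add two integers");
}
```

Once compiled into a Python extension, it can be used as `import example; example.add(2, 3)`.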
25.878788
309
0.745902
eng_Latn
0.986713
dadf0e01b480e836d389bf79fd5f32f0b529b961
11,265
md
Markdown
work/deep_learning/framework.md
oaix/note
2e93309d27884756bbd6222e5ce64095bbd5bf0e
[ "MIT" ]
1
2021-11-09T12:53:55.000Z
2021-11-09T12:53:55.000Z
work/deep_learning/framework.md
oaix/note
2e93309d27884756bbd6222e5ce64095bbd5bf0e
[ "MIT" ]
null
null
null
work/deep_learning/framework.md
oaix/note
2e93309d27884756bbd6222e5ce64095bbd5bf0e
[ "MIT" ]
null
null
null
# Framework

## TensorFlow

### [install](https://medium.com/@yckim/tensorflow-1-3-install-on-ubuntu-16-04-2d191a6e5546)

```sh
sudo pip install tensorflow-gpu==1.3.0
pip install --robosense install tensorflow==1.3.0
```

## [PyTorch](https://pytorch.org/docs/stable/torch.html#torch.arange)

Installing via conda is recommended.

### [Visualization with tensorboardX](https://tensorboardx.readthedocs.io/en/latest/tensorboard.html)

```python
import numpy as np
import torch
from tensorboardX import SummaryWriter

writer = SummaryWriter(logdir=root_directory + '/log', comment='L3Net')
writer.add_scalar('train_loss', loss, epoch)
# writer.add_scalar('myscalar', value, iteration)

with writer:
    pc_map_index_new_0 = np.random.randint(0, 1000, 128*64*1331)
    pc_map_index_new_00 = torch.from_numpy(pc_map_index_new_0).unsqueeze(0)
    arr_map0 = torch.randn(1, 1100, 4)
    arr_online0 = torch.randn(1, 128*64, 4)
    label0 = torch.randn(1, 1, 3)
    if use_gpu:
        pc_map_index_new_00, arr_map0, arr_online0 = pc_map_index_new_00.to(gpu), arr_map0.to(gpu), arr_online0.to(gpu)
    writer.add_graph(model, (pc_map_index_new_00, arr_map0, arr_online0))
writer.close()
```

### [save model](https://stackoverflow.com/questions/42703500/best-way-to-save-a-trained-model-in-pytorch)

https://github.com/pytorch/tutorials/blob/master/beginner_source/saving_loading_models.py

https://pytorch.org/tutorials/beginner/saving_loading_models.html

```python
import os
import torch
import torch.nn as nn

'''
Notes:
# save and load entire model
torch.save(model, "model.pth")
model = torch.load("model.pth")

# save and load only the model parameters (recommended)
torch.save(model.state_dict(), "params.pth")
model.load_state_dict(torch.load("params.pth"))
'''

__all__ = ["CheckPoint"]


class CheckPoint(object):
    '''
    save model state to file
    check_point_params: model, optimizer, epoch
    '''

    def __init__(self, save_path):
        '''initialize class

        Arguments:
            save_path {string} -- path to save files
        '''
        self.save_path = os.path.join(save_path, "check_point")
        # make directory
        if not os.path.isdir(self.save_path):
            print ">>> Path not exists, create path {}".format(self.save_path)
            os.makedirs(self.save_path)

    def load_state_dict(self, model, pth_file):
        '''load state dict from file

        Arguments:
            model {nn.Module} -- target model
            pth_file {string} -- path of the saved state_dict

        Returns:
            [nn.Module] -- new model
        '''
        model.eval()
        state_dict = torch.load(pth_file)
        model.load_state_dict(state_dict)
        return model

    def load_checkpoint(self, checkpoint_path):
        '''load checkpoint file

        Arguments:
            checkpoint_path {string} -- path to the checkpoint file

        Returns:
            model_state_dict {dict} -- state dict of model
            optimizer_state_dict {dict} -- state dict of optimizer
            epoch {integer} -- number of epoch
        '''
        if os.path.isfile(checkpoint_path):
            print ">>> Load resume check-point from:", checkpoint_path
            self.check_point_params = torch.load(checkpoint_path)
            model_state_dict = self.check_point_params['model']
            optimizer_state_dict = self.check_point_params['optimizer']
            extra_params = self.check_point_params['extra_params']
            # because we save the index of the latest training/testing epochs,
            # we need to add 1 to start a new epoch.
            epoch = self.check_point_params['epoch']
            return model_state_dict, optimizer_state_dict, epoch
        else:
            assert False, "file not exists: " + checkpoint_path

    def save_checkpoint(self, model, optimizer, epoch, name, extra_params=None):
        '''save checkpoint data of target model and optimizer

        Arguments:
            model {torch.nn.Module} -- model
            optimizer {torch.nn.optimizer} -- optimizer
            epoch {integer} -- index of training/testing epochs
            name {string} -- name of pth file

        Note: if we add a hook to the grad by using register_hook(hook),
        then the hook function can not be saved, so we need to save
        state_dict() only. Although saving the state dictionary is
        recommended, sometimes we still need to save the whole model, as it
        saves all the information of the trained model and we do not need
        to create a new network the next time. However, the GPU information
        will be saved too, which leads to some issues when we use the model
        on a different machine.

        this function will generate a checkpoint file, which contains a dict
        {
            'model': state dict of model
            'optimizer': state dict of optimizer
            'epoch': index of training epochs
            'extra_params': extra params to save
        }
        '''
        self.check_point_params = {'model': None,
                                   'optimizer': None,
                                   'epoch': None,
                                   'extra_params': None}
        model.eval()
        # get state_dict from model and optimizer
        if isinstance(model, nn.DataParallel):
            model_state_dict = model.module.state_dict()
        else:
            model_state_dict = model.state_dict()
        optimizer_state_dict = optimizer.state_dict()

        # save information to a dict
        self.check_point_params['model'] = model_state_dict
        self.check_point_params['optimizer'] = optimizer_state_dict
        self.check_point_params['epoch'] = epoch + 1  # add 1 to start a new epoch
        self.check_point_params['extra_params'] = extra_params

        # save to file
        torch.save(self.check_point_params, os.path.join(self.save_path, name))

    def save_state_dict(self, model, name):
        '''save state dict of model

        Arguments:
            model {torch.nn.Module} -- model to save
            name {bool} -- name of saved file
        '''
        # get state dict
        if isinstance(model, nn.DataParallel):
            model_state_dict = model.module.state_dict()
        else:
            model_state_dict = model.state_dict()
        torch.save(model_state_dict, os.path.join(self.save_path, name))
```

#### Case # 1: Save the model to use it yourself for inference

You save the model, you restore it, and then you change the model to evaluation mode. This is done because you usually have `BatchNorm` and `Dropout` layers that by default are in train mode on construction:

```python
torch.save(model.state_dict(), filepath)

# Later, to restore:
model.load_state_dict(torch.load(filepath))
model.eval()
# Remember that you must call ``model.eval()`` to set dropout and batch
# normalization layers to evaluation mode before running inference.
# Failing to do this will yield inconsistent inference results.
```

#### Case # 2: Save model to resume training later

If you need to keep training the model that you are about to save, you need to save more than just the model. You also need to save the state of the optimizer, epochs, score, etc. You would do it like this:

```python
state = {
    'epoch': epoch,
    'state_dict': model.state_dict(),
    'optimizer': optimizer.state_dict(),
    ...
}
torch.save(state, filepath)

# model = TheModelClass(*args, **kwargs)
# optimizer = TheOptimizerClass(*args, **kwargs)
#
# checkpoint = torch.load(PATH)
# model.load_state_dict(checkpoint['model_state_dict'])
# optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
# epoch = checkpoint['epoch']
# loss = checkpoint['loss']

# If you wish to resume training, call ``model.train()`` to ensure these
# layers are in training mode.
```

#### Case # 3: Model to be used by someone else with no access to your code

In Tensorflow you can create a `.pb` file that defines both the architecture and the weights of the model. This is very handy, especially when using `Tensorflow serve`. The equivalent way to do this in Pytorch would be:

```python
torch.save(model, filepath)

# Then later:
model = torch.load(filepath)
```

### [Several ways to build a model](https://www.cnblogs.com/denny402/p/7593301.html)

```python
import torch
import torch.nn.functional as F
from collections import OrderedDict


# Method 1 -----------------------------------------
class Net1(torch.nn.Module):
    def __init__(self):
        super(Net1, self).__init__()
        self.conv1 = torch.nn.Conv2d(3, 32, 3, 1, 1)
        self.dense1 = torch.nn.Linear(32 * 3 * 3, 128)
        self.dense2 = torch.nn.Linear(128, 10)

    def forward(self, x):
        x = F.max_pool2d(F.relu(self.conv1(x)), 2)
        x = x.view(x.size(0), -1)
        x = F.relu(self.dense1(x))
        x = self.dense2(x)
        return x

print("Method 1:")
model1 = Net1()
print(model1)


# Method 2 ------------------------------------------
class Net2(torch.nn.Module):
    def __init__(self):
        super(Net2, self).__init__()
        self.conv = torch.nn.Sequential(
            torch.nn.Conv2d(3, 32, 3, 1, 1),
            torch.nn.ReLU(),
            torch.nn.MaxPool2d(2))
        self.dense = torch.nn.Sequential(
            torch.nn.Linear(32 * 3 * 3, 128),
            torch.nn.ReLU(),
            torch.nn.Linear(128, 10)
        )

    def forward(self, x):
        conv_out = self.conv(x)
        res = conv_out.view(conv_out.size(0), -1)
        out = self.dense(res)
        return out

print("Method 2:")
model2 = Net2()
print(model2)


# Method 3 -------------------------------
class Net3(torch.nn.Module):
    def __init__(self):
        super(Net3, self).__init__()
        self.conv = torch.nn.Sequential()
        self.conv.add_module("conv1", torch.nn.Conv2d(3, 32, 3, 1, 1))
        self.conv.add_module("relu1", torch.nn.ReLU())
        self.conv.add_module("pool1", torch.nn.MaxPool2d(2))
        self.dense = torch.nn.Sequential()
        self.dense.add_module("dense1", torch.nn.Linear(32 * 3 * 3, 128))
        self.dense.add_module("relu2", torch.nn.ReLU())
        self.dense.add_module("dense2", torch.nn.Linear(128, 10))

    def forward(self, x):
        conv_out = self.conv(x)
        res = conv_out.view(conv_out.size(0), -1)
        out = self.dense(res)
        return out

print("Method 3:")
model3 = Net3()
print(model3)


# Method 4 ------------------------------------------
class Net4(torch.nn.Module):
    def __init__(self):
        super(Net4, self).__init__()
        self.conv = torch.nn.Sequential(
            OrderedDict(
                [
                    ("conv1", torch.nn.Conv2d(3, 32, 3, 1, 1)),
                    ("relu1", torch.nn.ReLU()),
                    ("pool", torch.nn.MaxPool2d(2))
                ]
            ))

        self.dense = torch.nn.Sequential(
            OrderedDict([
                ("dense1", torch.nn.Linear(32 * 3 * 3, 128)),
                ("relu2", torch.nn.ReLU()),
                ("dense2", torch.nn.Linear(128, 10))
            ])
        )

    def forward(self, x):
        conv_out = self.conv(x)
        res = conv_out.view(conv_out.size(0), -1)
        out = self.dense(res)
        return out

print("Method 4:")
model4 = Net4()
print(model4)
```
32.002841
218
0.621749
eng_Latn
0.762098
dadf205b0cd92b3aa01afdfc97017ea9d88e4035
255
md
Markdown
library/juniper/event/protocols/mpls_clear_secondary_path_after_inactive/readme.md
amarhold/junoscriptorium
d2cd74e451c7bfb4d61f9f8ae78e049c8f1f3cee
[ "BSD-3-Clause" ]
117
2015-02-07T09:37:13.000Z
2022-02-10T15:37:09.000Z
library/juniper/event/protocols/mpls_clear_secondary_path_after_inactive/readme.md
amarhold/junoscriptorium
d2cd74e451c7bfb4d61f9f8ae78e049c8f1f3cee
[ "BSD-3-Clause" ]
7
2015-02-19T21:06:15.000Z
2018-03-08T12:12:29.000Z
library/juniper/event/protocols/mpls_clear_secondary_path_after_inactive/readme.md
amarhold/junoscriptorium
d2cd74e451c7bfb4d61f9f8ae78e049c8f1f3cee
[ "BSD-3-Clause" ]
57
2015-02-17T15:37:12.000Z
2022-01-28T06:14:44.000Z
Instructions
============

1. Copy the file to /var/db/scripts/event on each routing-engine.
2. Add the following configuration to the router:

        event-options {
            event-script {
                file clear_secondary_path_after_inactive.slax;
            }
        }
17
65
0.658824
eng_Latn
0.970474
dadf2c592643664a33922afab076712f9c6ff9aa
13,873
md
Markdown
docs/src/lecture_09/exercises.md
petrmvala/JuliaCourse
8169ec94eb06d0c6fe098a06ea5d061b16ab6556
[ "MIT" ]
null
null
null
docs/src/lecture_09/exercises.md
petrmvala/JuliaCourse
8169ec94eb06d0c6fe098a06ea5d061b16ab6556
[ "MIT" ]
null
null
null
docs/src/lecture_09/exercises.md
petrmvala/JuliaCourse
8169ec94eb06d0c6fe098a06ea5d061b16ab6556
[ "MIT" ]
1
2021-12-09T10:36:10.000Z
2021-12-09T10:36:10.000Z
```@setup nn using RDatasets using Plots using Random using Statistics using LinearAlgebra function split(X, y::AbstractVector; dims=1, ratio_train=0.8, kwargs...) n = length(y) size(X, dims) == n || throw(DimensionMismatch("...")) n_train = round(Int, ratio_train*n) i_rand = randperm(n) i_train = i_rand[1:n_train] i_test = i_rand[n_train+1:end] return selectdim(X, dims, i_train), y[i_train], selectdim(X, dims, i_test), y[i_test] end function normalize(X_train, X_test; dims=1, kwargs...) col_means = mean(X_train; dims) col_std = std(X_train; dims) return (X_train .- col_means) ./ col_std, (X_test .- col_means) ./ col_std end function onehot(y, classes) y_onehot = falses(length(classes), length(y)) for (i, class) in enumerate(classes) y_onehot[i, y .== class] .= 1 end return y_onehot end onecold(y, classes) = [classes[argmax(y_col)] for y_col in eachcol(y)] function prepare_data(X, y; do_normal=true, do_onehot=true, kwargs...) X_train, y_train, X_test, y_test = split(X, y; kwargs...) if do_normal X_train, X_test = normalize(X_train, X_test; kwargs...) end classes = unique(y) if do_onehot y_train = onehot(y_train, classes) y_test = onehot(y_test, classes) end return X_train, y_train, X_test, y_test, classes end # SimpleNet struct SimpleNet{T<:Real} W1::Matrix{T} b1::Vector{T} W2::Matrix{T} b2::Vector{T} end SimpleNet(n1, n2, n3) = SimpleNet(randn(n2, n1), randn(n2), randn(n3, n2), randn(n3)) function (m::SimpleNet)(x) z1 = m.W1*x .+ m.b1 a1 = max.(z1, 0) z2 = m.W2*a1 .+ m.b2 return exp.(z2) ./ sum(exp.(z2), dims=1) end function grad(m::SimpleNet, x::AbstractVector, y; ϵ=1e-10) z1 = m.W1*x .+ m.b1 a1 = max.(z1, 0) z2 = m.W2*a1 .+ m.b2 a2 = exp.(z2) ./ sum(exp.(z2), dims=1) l = -sum(y .* log.(a2 .+ ϵ)) e_z2 = exp.(z2) l_part = (- e_z2 * e_z2' + Diagonal(e_z2 .* sum(e_z2))) / sum(e_z2)^2 l_a2 = - y ./ (a2 .+ ϵ) l_z2 = l_part * l_a2 l_a1 = m.W2' * l_z2 l_z1 = l_a1 .* (a1 .> 0) l_x = m.W1' * l_z1 l_W2 = l_z2 * a1' l_b2 = l_z2 l_W1 = l_z1 * x' l_b1 = l_z1 return l, l_W1, l_b1, l_W2, l_b2 end mean_tuple(d::AbstractArray{<:Tuple}) = Tuple([mean([d[k][i] for k in 1:length(d)]) for i in 1:length(d[1])]) predict(X) = m(X) accuracy(X, y) = mean(onecold(predict(X), classes) .== onecold(y, classes)) ``` ```@setup nn iris = dataset("datasets", "iris") X = Matrix(iris[:, 1:4]) y = iris.Species ``` # [Exercises](@id l9-exercises) ```@raw html <div class = "homework-body"> <header class = "homework-header">Homework: Optimal setting</header><p> ``` Perform an analysis of hyperparameters of the neural network from this lecture. Examples may include network architecture, learning rate (stepsize), activation functions or normalization. Write a short summary (in LaTeX) of your suggestions. ```@raw html </p></div> ``` ```@raw html <div class = "exercise-body"> <header class = "exercise-header">Exercise 1: Keyword arguments</header><p> ``` Keyword arguments (often denoted as `kwargs` but any name may be used) specify additional arguments which do not need to be used when the function is called. We recall the `prepare_data` function written earlier. ```@example nn function prepare_data(X, y; do_normal=true, do_onehot=true, kwargs...) X_train, y_train, X_test, y_test = split(X, y; kwargs...) if do_normal X_train, X_test = normalize(X_train, X_test; kwargs...) 
end classes = unique(y) if do_onehot y_train = onehot(y_train, classes) y_test = onehot(y_test, classes) end return X_train, y_train, X_test, y_test, classes end nothing # hide ``` All keyword arguments `kwargs` will be passed to the `split` and `normalize` functions. The benefit is that we do not need to specify the keyword arguments for `split` in `prepare_data`. Recall that `split` takes `ratio_split` as an optional argument. Write a one-line function ```ratio_train``` which gets the training and testing sets and computes the ratio of samples in the training set. Then call the `prepare_data` with: - no normalization and the default split ratio; - normalization and the split ratio of 50/50; ```@raw html </p></div> <details class = "solution-body"> <summary class = "solution-header">Solution:</summary><p> ``` The ```ratio_train``` function reads: ```@example nn ratio_train(X_train, X_test) = size(X_train, 2) / (size(X_train,2) + size(X_test,2)) nothing # hide ``` The first case uses the default ratio; hence we do not pass `ratio_split`. Since we do not want to use normalization, we need to pass `do_normal=false`. ```@example nn X_train, y_train, X_test, y_test, classes = prepare_data(X', y; dims=2, do_normal=false) println("Ratio train/test = ", ratio_train(X_train, X_test)) ``` The second case behaves the other way round. We use the default normalization; thus, we do not need to specify `do_normal=true` (even though it may be a good idea). We need to pass `ratio_train=0.5`. ```@example nn X_train, y_train, X_test, y_test, classes = prepare_data(X', y; dims=2, ratio_train=0.5) println("Ratio train/test = ", ratio_train(X_train, X_test)) ``` ```@raw html </p></details> ``` The goal of the following exercise is to show the prediction function graphically. For this reason, we will consider only two features. All the following exercises use the data with the fixed seed for reproducibility. ```@example nn Random.seed!(666) X_train, y_train, X_test, y_test, classes = prepare_data(X[:,3:4]', y; dims = 2) nothing # hide ``` ```@raw html <div class = "exercise-body"> <header class = "exercise-header">Exercise 2: Showing the contours</header><p> ``` Use the same training procedure for 1000 iterations to train the classifier with the new data. Then plot a graph depicting which classes are predicted at subregions of ``[-2,2]\times [-2,2]``. Moreover, depict the testing data in this graph. **Hint**: use the `heatmap` function. ```@raw html </p></div> <details class = "solution-body"> <summary class = "solution-header">Solution:</summary><p> ``` The procedure for training the network is the same as during the lecture. ```@example nn m = SimpleNet(size(X_train,1), 5, size(y_train,1)) α = 1e-1 max_iter = 1000 for iter in 1:max_iter grad_all = [grad(m, X_train[:,k], y_train[:,k]) for k in 1:size(X_train,2)] grad_mean = mean_tuple(grad_all) m.W1 .-= α*grad_mean[2] m.b1 .-= α*grad_mean[3] m.W2 .-= α*grad_mean[4] m.b2 .-= α*grad_mean[5] end nothing # hide ``` The prediction function is `m([x;y])`. Since this creates a one-hot representation, we need to convert it into a one-cold representation. However, it is not possible to use `onecold(m([x; y]), classes)`, which would result in one of the three string labels. We need to use `onecold(m([x; y]), 1:3)` to convert it to a real number. Then we call the `heatmap` function. Since we will later use plotting in a loop, we assign the graph to `plt`. 
```@example nn colours = [:blue, :red, :green] xs = -2:0.01:2 plt = heatmap(xs, xs, (x, y) -> onecold(m([x; y]), 1:3)[1]; color = colours, opacity = 0.2, axis = false, ticks = false, cbar = false, legend = :topleft, ) nothing # hide ``` To add the predictions of the testing set, we find the indices `inds` of samples from each class. Then we add them via the `scatter!` plot. We keep `colours` from the previous part to have the same colours. Since we plotted in a loop, we need to `display` the plot. ```@example nn for (i, class) in enumerate(classes) inds = findall(onecold(y_test, classes) .== class) scatter!(plt, X_test[1, inds], X_test[2, inds]; label = class, marker=(8, 0.8, colours[i]), ) end display(plt) savefig("Separation.svg") # hide ``` ```@raw html </p></details> ``` ![](Separation.svg) ```@raw html <div class = "exercise-body"> <header class = "exercise-header">Exercise 3: Overfitting</header><p> ``` This exercise shows the well-known effect of overfitting. Since the model sees only the training set, it may fit it too perfectly (overfit it) and generalize poorly to the testing set of unseen examples. Consider the same data as in the previous exercise but train a network with 25 hidden neurons for 25000 iterations. Plot the loss function values on the training and testing sets. Then plot the same prediction visualization as in the previous exercise for both testing and training sets. Describe what went wrong. ```@raw html </p></div> <details class = "solution-body"> <summary class = "solution-header">Solution:</summary><p> ``` We first specify the loss function. ```@example nn loss(X, y; ϵ = 1e-10) = mean(-sum(y .* log.(m(X) .+ ϵ); dims = 1)) nothing # hide ``` Then we train the network as before. The only change is that we need to save the training and testing objective. ```@example nn m = SimpleNet(size(X_train,1), 25, size(y_train,1)) α = 1e-1 max_iter = 25000 L_train = zeros(max_iter) L_test = zeros(max_iter) for iter in 1:max_iter grad_all = [grad(m, X_train[:,k], y_train[:,k]) for k in 1:size(X_train,2)] grad_mean = mean_tuple(grad_all) m.W1 .-= α*grad_mean[2] m.b1 .-= α*grad_mean[3] m.W2 .-= α*grad_mean[4] m.b2 .-= α*grad_mean[5] L_train[iter] = loss(X_train, y_train) L_test[iter] = loss(X_test, y_test) end ``` Then we plot it. We ignore the first nine iterations, where the loss is large there. We see the classical procedure of overfitting. While the loss function on the training set decreases steadily, on the testing set, it decreases first, and after approximately 100 iterations, it starts increasing. This behaviour may be prevented by several techniques, which we discuss in the following lecture. ```@example nn plot(L_train[10:end], xlabel="Iteration", label="Training loss", legend=:topleft) plot!(L_test[10:end], label="Testing loss") savefig("Train_test.svg") # hide ``` ![](Train_test.svg) We create the contour plot in the same way as in the previous exercise. 
```@example nn plt = heatmap(xs, xs, (x, y) -> onecold(m([x; y]), 1:3)[1]; color = colours, opacity = 0.2, axis = false, ticks = false, cbar = false, legend = :topleft, ) for (i, class) in enumerate(classes) inds = findall(onecold(y_test, classes) .== class) scatter!(plt, X_test[1, inds], X_test[2, inds]; label = class, marker=(8, 0.8, colours[i]), ) end display(plt) savefig("Separation2.svg") # hide ``` ![](Separation2.svg) ```@example nn plt = heatmap(xs, xs, (x, y) -> onecold(m([x; y]), 1:3)[1]; color = colours, opacity = 0.2, axis = false, ticks = false, cbar = false, legend = :topleft, ) for (i, class) in enumerate(classes) inds = findall(onecold(y_train, classes) .== class) scatter!(plt, X_train[1, inds], X_train[2, inds]; label = class, marker=(8, 0.8, colours[i]), ) end display(plt) savefig("Separation3.svg") # hide ``` ![](Separation3.svg) The separation on the testing set is quite good, but it could be better for the two bottommost green circles (iris virginica). The model predicted (in the background) the red colour (iris versicolor) there. This is wrong. The reason is clear from the picture depicting the training set. The classifier tried to perfectly fit the boundary between the green and red points, making an outward-pointing tip. This is precisely overfitting and the reason for the misclassification on the testing set. ```@raw html </p></details> ``` ![](Separation2.svg) ![](Separation3.svg) ```@raw html <div class = "exercise-body"> <header class = "exercise-header">Exercise 4: Generalization</header><p> ``` The contour plots from Exercises 2 and 3 are strikingly different, especially in the top-left and bottom-right corners. Why is that? ```@raw html </p></div> <details class = "solution-body"> <summary class = "solution-header">Solution:</summary><p> ``` Since the dataset does not contain any data in the top-left or bottom-right corners, it does not know what to predict. From its perspective, both separations are very good. ```@raw html <div class = "info-body"> <header class = "info-header">Generalization</header><p> ``` If a classifier does not have any data in some region, it may predict anything there, including predictions with no sense. ```@raw html </p></div> ``` ```@raw html </p></details> ``` ```@raw html <div class = "exercise-body"> <header class = "exercise-header">Exercise 5: Universal approximation of neural networks (theory)</header><p> ``` Proof the theorem about universal approximation of neural networks. ```@raw html </p></div> <details class = "solution-body"> <summary class = "solution-header">Solution:</summary><p> ``` Since piecewise linear functions are dense in the set of continuous functions, there is a piecewise linear function ``h`` such that ``\|h-g\|_{\infty}\le \varepsilon``. Assume that ``h`` has kinks at ``x_1<\dots<x_n`` with function values ``h(x_i)=y_i`` for ``i=1,\dots,n``. Defining ```math d_i = \frac{y_{i+1}-y_i}{x_{i+1}-x_i}, ``` then ``h`` has the form ```math h(x) = y_i + d_i(x-x_i) \qquad\text{ for }x\in [x_i,x_{i+1}]. ``` It is not difficult to show that ```math h(x) = y_1 + \sum_{i=1}^n(d_i-d_{i-1})\operatorname{max}\{x-x_i,0\}, ``` where we defined ``d_0=0``. Then ``h`` can be represented as the following network with two layers: - Dense layer with ``n`` hidden neurons and ReLU activation function. Neuron ``i`` has weight ``1`` and bias ``-x_i``. - Dense layer with ``1`` output neurons and identity activation function. Connection ``i`` has weight ``d_i-d_{i-1}`` and the joint bias is ``y_1``. This finishes the proof. 
```@raw html
</p></details>
```
28.312245
494
0.673755
eng_Latn
0.95485
dadf701e6f1fc8069bca8a5b4343c2c6235fc676
218
md
Markdown
_watches/M20200531_060741_TLP_4.md
Meteoros-Floripa/meteoros.floripa.br
7d296fb8d630a4e5fec9ab1a3fb6050420fc0dad
[ "MIT" ]
5
2020-01-22T17:44:06.000Z
2020-01-26T17:57:58.000Z
_watches/M20200531_060741_TLP_4.md
Meteoros-Floripa/site
764cf471d85a6b498873610e4f3b30efd1fd9fae
[ "MIT" ]
null
null
null
_watches/M20200531_060741_TLP_4.md
Meteoros-Floripa/site
764cf471d85a6b498873610e4f3b30efd1fd9fae
[ "MIT" ]
2
2020-05-19T17:06:27.000Z
2020-09-04T00:00:43.000Z
---
layout: watch
title: TLP4 - 31/05/2020 - M20200531_060741_TLP_4T.jpg
date: 2020-05-31 06:07:41
permalink: /2020/05/31/watch/M20200531_060741_TLP_4
capture: TLP4/2020/202005/20200530/M20200531_060741_TLP_4T.jpg
---
27.25
62
0.784404
eng_Latn
0.049586
dadff80c749a53cc8026f39fbec3af4eb6210363
2,607
md
Markdown
README.md
AminoApps/context-propagation-go
46921238db815b6498730dd3ceb777baa18bfd4d
[ "Apache-2.0" ]
5
2019-07-16T02:50:54.000Z
2021-02-09T07:38:08.000Z
README.md
AminoApps/context-propagation-go
46921238db815b6498730dd3ceb777baa18bfd4d
[ "Apache-2.0" ]
null
null
null
README.md
AminoApps/context-propagation-go
46921238db815b6498730dd3ceb777baa18bfd4d
[ "Apache-2.0" ]
1
2020-01-21T09:13:35.000Z
2020-01-21T09:13:35.000Z
[![Build Status](https://travis-ci.org/AminoApps/context-propagation-go.svg?branch=master)](https://travis-ci.org/AminoApps/context-propagation-go)
[![Go Report Card](https://goreportcard.com/badge/github.com/AminoApps/context-propagation-go)](https://goreportcard.com/report/github.com/AminoApps/context-propagation-go)

# Context Propagation Go

This middleware is used to support propagating context between microservices. In this version, context is propagated via the [OpenTracing baggage](https://github.com/opentracing/specification/blob/master/specification.md) protocol.

Supported frameworks for automatic inject and extract:

* [Gin](module/context-propagation-gin)
* [Standard Http Server](module/context-propagation-http)
* [Standard Http Client](module/context-propagation-http)
* [gRPC Server](module/context-propagation-grpc)
* [gRPC Client](module/context-propagation-grpc)

## How to use

### Operate data from context

> Before getting or setting data on the context, you should enable automatic inject and extract.

```shell
go get -u github.com/AminoApps/context-propagation-go
```

```go
package main

import cp "github.com/AminoApps/context-propagation-go"

ctx = cp.SetValueToContext(ctx, "my-key", "my-value")

value := cp.GetValueFromContext(context.Background(), "my-key")
```

### Auto inject and extract

#### Gin

```shell
go get -u github.com/AminoApps/context-propagation-go/module/context-propagation-gin
```

```go
package main

import cpgin "github.com/AminoApps/context-propagation-go/module/context-propagation-gin"

e := gin.New()
e.Use(cpgin.Middleware())

// To propagate the context from gin, please use the context from the request.
func TestApi(c *gin.Context) {
    GetDataFromDataBase(c.Request.Context())
}
```

#### Http Client/Server

```shell
go get -u github.com/AminoApps/context-propagation-go/module/context-propagation-http
```

```go
package main

import cphttp "github.com/AminoApps/context-propagation-go/module/context-propagation-http"

http.ListenAndServe(":8080", cphttp.Wrap(myHandler))

client := cphttp.WrapClient(&http.Client{})

// Please use ctxhttp to wrap the request.
resp, err := ctxhttp.Get(ctx, client, "http://127.0.0.1:8080/test")
```

#### Grpc Client/Server

```shell
go get -u github.com/AminoApps/context-propagation-go/module/context-propagation-grpc
```

```go
package main

import cpgrpc "github.com/AminoApps/context-propagation-go/module/context-propagation-grpc"

server := grpc.NewServer(grpc.UnaryInterceptor(cpgrpc.NewUnaryServerInterceptor()))

client := grpc.Dial(address, grpc.WithUnaryInterceptor(cpgrpc.NewUnaryClientInterceptor()))
```
27.15625
172
0.762179
yue_Hant
0.451225
dae056173b93804cd9153fa200f3e65e15ac361d
19,760
md
Markdown
articles/azure-monitor/alerts/alerts-log-create-templates.md
flexray/azure-docs.pl-pl
bfb8e5d5776d43b4623ce1c01dc44c8efc769c78
[ "CC-BY-4.0", "MIT" ]
12
2017-08-28T07:45:55.000Z
2022-03-07T21:35:48.000Z
articles/azure-monitor/alerts/alerts-log-create-templates.md
flexray/azure-docs.pl-pl
bfb8e5d5776d43b4623ce1c01dc44c8efc769c78
[ "CC-BY-4.0", "MIT" ]
441
2017-11-08T13:15:56.000Z
2021-06-02T10:39:53.000Z
articles/azure-monitor/alerts/alerts-log-create-templates.md
flexray/azure-docs.pl-pl
bfb8e5d5776d43b4623ce1c01dc44c8efc769c78
[ "CC-BY-4.0", "MIT" ]
27
2017-11-13T13:38:31.000Z
2022-02-17T11:57:33.000Z
--- title: Tworzenie alertu dziennika przy użyciu szablonu Azure Resource Manager | Microsoft Docs description: Dowiedz się, jak utworzyć alert dziennika przy użyciu szablonu Menedżer zasobów author: yanivlavi ms.author: yalavi ms.topic: conceptual ms.date: 09/22/2020 ms.openlocfilehash: 560f26b712818327294791feb787b5fe7c9d7b82 ms.sourcegitcommit: 867cb1b7a1f3a1f0b427282c648d411d0ca4f81f ms.translationtype: MT ms.contentlocale: pl-PL ms.lasthandoff: 03/20/2021 ms.locfileid: "102036473" --- # <a name="create-a-log-alert-with-a-resource-manager-template"></a>Tworzenie alertu dziennika za pomocą szablonu usługi Resource Manager Alerty dzienników umożliwiają użytkownikom użycie zapytania [log Analytics](../logs/log-analytics-tutorial.md) w celu obliczenia dzienników zasobów co określoną częstotliwość i wyzwolenia alertu na podstawie wyników. Reguły mogą wyzwalać uruchomienie co najmniej jednej akcji przy użyciu [grup akcji](./action-groups.md). [Dowiedz się więcej o funkcjach i terminologii alertów dzienników](./alerts-unified-log.md). W tym artykule pokazano, jak można użyć [szablonu Azure Resource Manager](../../azure-resource-manager/templates/template-syntax.md) , aby skonfigurować [alerty dzienników](./alerts-unified-log.md) w programie Azure monitor. Szablony Menedżer zasobów umożliwiają programistyczne Konfigurowanie alertów w sposób spójny i powtarzalny w środowiskach. Alerty dzienników są tworzone w `Microsoft.Insights/scheduledQueryRules` dostawcy zasobów. Zobacz Dokumentacja interfejsu API dla [zaplanowanych reguł zapytania API](/rest/api/monitor/scheduledqueryrules/). Podstawowe kroki są następujące: 1. Użyj jednego z poniższych szablonów jako pliku JSON, który opisuje sposób tworzenia alertu. 2. Edytuj odpowiedni plik parametrów w formacie JSON i używaj go w celu dostosowania alertu. 4. Wdróż szablon przy użyciu dowolnej metody wdrażania. > [!NOTE] > Dane dziennika z [obszaru roboczego log Analytics](../logs/log-analytics-tutorial.md) mogą być wysyłane do magazynu metryk Azure monitor. Alerty metryk mają [różne zachowanie](./alerts-metric-overview.md), co może być bardziej odpowiednie w zależności od danych, z którymi pracujesz. Aby uzyskać informacje na temat tego, co i jak można kierować dzienniki do metryk, zobacz [alert metryki dla dzienników](./alerts-metric-logs.md). > [!NOTE] > Alerty dzienników dla Log Analytics używane do zarządzania przy użyciu starszego [interfejsu API alertów log Analytics](./api-alerts.md) i starszych szablonów [log Analytics zapisanych wyszukiwań i alertów](../insights/solutions.md). [Dowiedz się więcej na temat przełączania do bieżącego interfejsu API ScheduledQueryRules](alerts-log-api-switch.md). 
## <a name="simple-template-up-to-api-version-2018-04-16"></a>Prosty szablon (do interfejsu API w wersji 2018-04-16) Szablon [tworzenia reguł zaplanowanych zapytań](/rest/api/monitor/scheduledqueryrules/createorupdate) na podstawie [liczby alertów dziennika wyników](./alerts-unified-log.md#count-of-the-results-table-rows) (przykładowe dane są ustawiane jako zmienne): ```json { "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", "contentVersion": "1.0.0.0", "parameters": { }, "variables": { "alertLocation": "southcentralus", "alertName": "samplelogalert", "alertDescription": "Sample log search alert", "alertStatus": "true", "alertSource":{ "Query":"requests", "SourceId": "/subscriptions/a123d7efg-123c-1234-5678-a12bc3defgh4/resourceGroups/myRG/providers/microsoft.insights/components/sampleAIapplication", "Type":"ResultCount" }, "alertSchedule":{ "Frequency": 15, "Time": 60 }, "alertActions":{ "SeverityLevel": "4" }, "alertTrigger":{ "Operator":"GreaterThan", "Threshold":"1" }, "actionGrp":{ "ActionGroup": "/subscriptions/a123d7efg-123c-1234-5678-a12bc3defgh4/resourceGroups/myRG/providers/microsoft.insights/actiongroups/sampleAG", "Subject": "Customized Email Header", "Webhook": "{ \"alertname\":\"#alertrulename\", \"IncludeSearchResults\":true }" } }, "resources":[ { "name":"[variables('alertName')]", "type":"Microsoft.Insights/scheduledQueryRules", "apiVersion": "2018-04-16", "location": "[variables('alertLocation')]", "properties":{ "description": "[variables('alertDescription')]", "enabled": "[variables('alertStatus')]", "source": { "query": "[variables('alertSource').Query]", "dataSourceId": "[variables('alertSource').SourceId]", "queryType":"[variables('alertSource').Type]" }, "schedule":{ "frequencyInMinutes": "[variables('alertSchedule').Frequency]", "timeWindowInMinutes": "[variables('alertSchedule').Time]" }, "action":{ "odata.type": "Microsoft.WindowsAzure.Management.Monitoring.Alerts.Models.Microsoft.AppInsights.Nexus.DataContracts.Resources.ScheduledQueryRules.AlertingAction", "severity":"[variables('alertActions').SeverityLevel]", "aznsAction":{ "actionGroup":"[array(variables('actionGrp').ActionGroup)]", "emailSubject":"[variables('actionGrp').Subject]", "customWebhookPayload":"[variables('actionGrp').Webhook]" }, "trigger":{ "thresholdOperator":"[variables('alertTrigger').Operator]", "threshold":"[variables('alertTrigger').Threshold]" } } } } ] } ``` Ten plik JSON można zapisać i wdrożyć przy użyciu [Azure Resource Manager w Azure Portal](../../azure-resource-manager/templates/deploy-portal.md#deploy-resources-from-custom-template). 
## <a name="template-with-cross-resource-query-up-to-api-version-2018-04-16"></a>Template with cross-resource query (up to API version 2018-04-16)

A [template that creates a scheduled query rule](/rest/api/monitor/scheduledqueryrules/createorupdate) based on [metric measurement](./alerts-unified-log.md#calculation-of-measure-based-on-a-numeric-column-such-as-cpu-counter-value) that queries [cross-resources](../logs/cross-workspace-query.md) (sample data set as variables):

```json
{
    "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
    "contentVersion": "1.0.0.0",
    "parameters": { },
    "variables": {
        "alertLocation": "Region Name for your Application Insights App or Log Analytics Workspace",
        "alertName": "sample log alert",
        "alertDescr": "Sample log search alert",
        "alertStatus": "true",
        "alertSource":{
            "Query":"union workspace(\"servicews\").Update, app('serviceapp').requests | summarize AggregatedValue = count() by bin(TimeGenerated,1h), Classification",
            "Resource1": "/subscriptions/a123d7efg-123c-1234-5678-a12bc3defgh4/resourceGroups/contosoRG/providers/microsoft.OperationalInsights/workspaces/servicews",
            "Resource2": "/subscriptions/a123d7efg-123c-1234-5678-a12bc3defgh4/resourceGroups/contosoRG/providers/microsoft.insights/components/serviceapp",
            "SourceId": "/subscriptions/a123d7efg-123c-1234-5678-a12bc3defgh4/resourceGroups/contosoRG/providers/microsoft.OperationalInsights/workspaces/servicews",
            "Type":"ResultCount"
        },
        "alertSchedule":{
            "Frequency": 15,
            "Time": 60
        },
        "alertActions":{
            "SeverityLevel": "4",
            "SuppressTimeinMin": 20
        },
        "alertTrigger":{
            "Operator":"GreaterThan",
            "Threshold":"1"
        },
        "metricMeasurement": {
            "thresholdOperator": "Equal",
            "threshold": "1",
            "metricTriggerType": "Consecutive",
            "metricColumn": "Classification"
        },
        "actionGrp":{
            "ActionGroup": "/subscriptions/a123d7efg-123c-1234-5678-a12bc3defgh4/resourceGroups/contosoRG/providers/microsoft.insights/actiongroups/sampleAG",
            "Subject": "Customized Email Header",
            "Webhook": "{ \"alertname\":\"#alertrulename\", \"IncludeSearchResults\":true }"
        }
    },
    "resources":[ {
        "name":"[variables('alertName')]",
        "type":"Microsoft.Insights/scheduledQueryRules",
        "apiVersion": "2018-04-16",
        "location": "[variables('alertLocation')]",
        "properties":{
            "description": "[variables('alertDescr')]",
            "enabled": "[variables('alertStatus')]",
            "source": {
                "query": "[variables('alertSource').Query]",
                "authorizedResources": "[concat(array(variables('alertSource').Resource1), array(variables('alertSource').Resource2))]",
                "dataSourceId": "[variables('alertSource').SourceId]",
                "queryType":"[variables('alertSource').Type]"
            },
            "schedule":{
                "frequencyInMinutes": "[variables('alertSchedule').Frequency]",
                "timeWindowInMinutes": "[variables('alertSchedule').Time]"
            },
            "action":{
                "odata.type": "Microsoft.WindowsAzure.Management.Monitoring.Alerts.Models.Microsoft.AppInsights.Nexus.DataContracts.Resources.ScheduledQueryRules.AlertingAction",
                "severity":"[variables('alertActions').SeverityLevel]",
                "throttlingInMin": "[variables('alertActions').SuppressTimeinMin]",
                "aznsAction":{
                    "actionGroup": "[array(variables('actionGrp').ActionGroup)]",
                    "emailSubject":"[variables('actionGrp').Subject]",
                    "customWebhookPayload":"[variables('actionGrp').Webhook]"
                },
                "trigger":{
                    "thresholdOperator":"[variables('alertTrigger').Operator]",
                    "threshold":"[variables('alertTrigger').Threshold]",
                    "metricTrigger":{
                        "thresholdOperator": "[variables('metricMeasurement').thresholdOperator]",
                        "threshold": "[variables('metricMeasurement').threshold]",
                        "metricColumn": "[variables('metricMeasurement').metricColumn]",
                        "metricTriggerType": "[variables('metricMeasurement').metricTriggerType]"
                    }
                }
            }
        }
    } ]
}
```

> [!IMPORTANT]
> When using a cross-resource query in a log alert, the use of [authorizedResources](/rest/api/monitor/scheduledqueryrules/createorupdate#source) is mandatory, and the user must have access to the list of resources provided.

This JSON file can be saved and deployed using [Azure Resource Manager in the Azure portal](../../azure-resource-manager/templates/deploy-portal.md#deploy-resources-from-custom-template).

## <a name="template-for-all-resource-types-from-api-version-2020-05-01-preview"></a>Template for all resource types (from API version 2020-05-01-preview)

A [template that creates a scheduled query rule](/rest/api/monitor/scheduledqueryrules/createorupdate) for all resource types (sample data set as variables):

```json
{
    "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
    "contentVersion": "1.0.0.0",
    "parameters": {
        "alertName": {
            "type": "string",
            "minLength": 1,
            "metadata": { "description": "Name of the alert" }
        },
        "location": {
            "type": "string",
            "minLength": 1,
            "metadata": { "description": "Location of the alert" }
        },
        "alertDescription": {
            "type": "string",
            "defaultValue": "This is a metric alert",
            "metadata": { "description": "Description of alert" }
        },
        "alertSeverity": {
            "type": "int",
            "defaultValue": 3,
            "allowedValues": [ 0, 1, 2, 3, 4 ],
            "metadata": { "description": "Severity of alert {0,1,2,3,4}" }
        },
        "isEnabled": {
            "type": "bool",
            "defaultValue": true,
            "metadata": { "description": "Specifies whether the alert is enabled" }
        },
        "resourceId": {
            "type": "string",
            "minLength": 1,
            "metadata": { "description": "Full Resource ID of the resource emitting the metric that will be used for the comparison. For example /subscriptions/00000000-0000-0000-0000-0000-00000000/resourceGroups/ResourceGroupName/providers/Microsoft.compute/virtualMachines/VM_xyz" }
        },
        "query": {
            "type": "string",
            "minLength": 1,
            "metadata": { "description": "Name of the metric used in the comparison to activate the alert." }
        },
        "metricMeasureColumn": {
            "type": "string",
            "metadata": { "description": "Name of the measure column used in the alert evaluation." }
        },
        "resourceIdColumn": {
            "type": "string",
            "metadata": { "description": "Name of the resource ID column used in the alert targeting the alerts." }
        },
        "operator": {
            "type": "string",
            "defaultValue": "GreaterThan",
            "allowedValues": [ "Equals", "NotEquals", "GreaterThan", "GreaterThanOrEqual", "LessThan", "LessThanOrEqual" ],
            "metadata": { "description": "Operator comparing the current value with the threshold value." }
        },
        "threshold": {
            "type": "string",
            "defaultValue": "0",
            "metadata": { "description": "The threshold value at which the alert is activated." }
        },
        "numberOfEvaluationPeriods": {
            "type": "string",
            "defaultValue": "4",
            "metadata": { "description": "The number of periods to check in the alert evaluation." }
        },
        "minFailingPeriodsToAlert": {
            "type": "string",
            "defaultValue": "3",
            "metadata": { "description": "The number of unhealthy periods to alert on (must be lower or equal to numberOfEvaluationPeriods)." }
        },
        "timeAggregation": {
            "type": "string",
            "defaultValue": "Average",
            "allowedValues": [ "Average", "Minimum", "Maximum", "Total", "Count" ],
            "metadata": { "description": "How the data that is collected should be combined over time." }
        },
        "windowSize": {
            "type": "string",
            "defaultValue": "PT5M",
            "allowedValues": [ "PT1M", "PT5M", "PT15M", "PT30M", "PT1H", "PT6H", "PT12H", "PT24H" ],
            "metadata": { "description": "Period of time used to monitor alert activity based on the threshold. Must be between one minute and one day. ISO 8601 duration format." }
        },
        "evaluationFrequency": {
            "type": "string",
            "defaultValue": "PT1M",
            "allowedValues": [ "PT1M", "PT5M", "PT15M", "PT30M", "PT1H" ],
            "metadata": { "description": "how often the metric alert is evaluated represented in ISO 8601 duration format" }
        },
        "muteActionsDuration": {
            "type": "string",
            "defaultValue": "PT5M",
            "allowedValues": [ "PT1M", "PT5M", "PT15M", "PT30M", "PT1H", "PT6H", "PT12H", "PT24H" ],
            "metadata": { "description": "Mute actions for the chosen period of time (in ISO 8601 duration format) after the alert is fired." }
        },
        "actionGroupId": {
            "type": "string",
            "defaultValue": "",
            "metadata": { "description": "The ID of the action group that is triggered when the alert is activated or deactivated" }
        }
    },
    "variables": { },
    "resources": [
        {
            "name": "[parameters('alertName')]",
            "type": "Microsoft.Insights/scheduledQueryRules",
            "location": "[parameters('location')]",
            "apiVersion": "2020-05-01-preview",
            "tags": {},
            "properties": {
                "description": "[parameters('alertDescription')]",
                "severity": "[parameters('alertSeverity')]",
                "enabled": "[parameters('isEnabled')]",
                "scopes": ["[parameters('resourceId')]"],
                "evaluationFrequency":"[parameters('evaluationFrequency')]",
                "windowSize": "[parameters('windowSize')]",
                "criteria": {
                    "allOf": [
                        {
                            "query": "[parameters('query')]",
                            "metricMeasureColumn": "[parameters('metricMeasureColumn')]",
                            "resourceIdColumn": "[parameters('resourceIdColumn')]",
                            "dimensions":[],
                            "operator": "[parameters('operator')]",
                            "threshold" : "[parameters('threshold')]",
                            "timeAggregation": "[parameters('timeAggregation')]",
                            "failingPeriods": {
                                "numberOfEvaluationPeriods": "[parameters('numberOfEvaluationPeriods')]",
                                "minFailingPeriodsToAlert": "[parameters('minFailingPeriodsToAlert')]"
                            }
                        }
                    ]
                },
                "muteActionsDuration": "[parameters('muteActionsDuration')]",
                "actions": [
                    {
                        "actionGroupId": "[parameters('actionGroupId')]"
                    }
                ]
            }
        }
    ]
}
```

This JSON file can be saved and deployed using [Azure Resource Manager in the Azure portal](../../azure-resource-manager/templates/deploy-portal.md#deploy-resources-from-custom-template).

## <a name="next-steps"></a>Next steps

* Learn about [log alerts](./alerts-unified-log.md)
* Learn more about [managing log alerts](./alerts-log.md)
* Learn about [webhook actions for log alerts](./alerts-log-webhook.md)
* Learn more about [log queries](../logs/log-query-overview.md).
45.217391
554
0.570698
pol_Latn
0.508585
dae0b04c121d8d4607e87581a1d07a9d91945593
5,863
md
Markdown
articles/virtual-machines/image-builder-overview.md
zer0big/azure-docs.ko-kr
cf9888afefeb065cb30a49deba730f39142f118a
[ "CC-BY-4.0", "MIT" ]
null
null
null
articles/virtual-machines/image-builder-overview.md
zer0big/azure-docs.ko-kr
cf9888afefeb065cb30a49deba730f39142f118a
[ "CC-BY-4.0", "MIT" ]
null
null
null
articles/virtual-machines/image-builder-overview.md
zer0big/azure-docs.ko-kr
cf9888afefeb065cb30a49deba730f39142f118a
[ "CC-BY-4.0", "MIT" ]
null
null
null
---
title: Learn about Azure Image Builder (preview)
description: Learn more about Azure Image Builder for virtual machines in Azure.
author: danielsollondon
ms.author: danis
ms.date: 05/02/2019
ms.topic: conceptual
ms.service: virtual-machines
ms.subservice: imaging
ms.reviewer: cynthn
ms.openlocfilehash: fb596352011bcce3130d22b7277444bc45679f4c
ms.sourcegitcommit: 8b4b4e060c109a97d58e8f8df6f5d759f1ef12cf
ms.translationtype: MT
ms.contentlocale: ko-KR
ms.lasthandoff: 12/07/2020
ms.locfileid: "96841586"
---
# <a name="preview-azure-image-builder-overview"></a>Preview: Azure Image Builder overview

Organizations can use standardized virtual machine (VM) images to migrate to the cloud and keep their deployments consistent. Images typically include predefined security and configuration settings and required software. Setting up your own imaging pipeline requires time, infrastructure, and setup, but with Azure VM Image Builder you just provide a simple configuration that describes your image, submit it to the service, and the image is built and distributed.

With Azure VM Image Builder (Azure Image Builder), you can start with a Windows or Linux-based Azure Marketplace image, an existing custom image, or a Red Hat Enterprise Linux (RHEL) ISO, and add your own customizations. Because Image Builder is built on [HashiCorp Packer](https://packer.io/), you can also import your existing Packer shell provisioner scripts. You can also specify where you want the resulting image to be hosted: in an [Azure Shared Image Gallery](shared-image-galleries.md), as a managed image, or as a VHD.

> [!IMPORTANT]
> Azure Image Builder is currently in public preview. This preview version is provided without a service level agreement, and it's not recommended for production workloads. Certain features might not be supported or might have constrained capabilities. For more information, see [Supplemental Terms of Use for Microsoft Azure Previews](https://azure.microsoft.com/support/legal/preview-supplemental-terms/).

## <a name="preview-features"></a>Preview features

The following features are supported in the preview:

- Create golden baseline images: you can create images that include your minimum security and corporate configurations, which departments can then customize further for their needs.
- Patch existing images: Image Builder lets you continuously patch existing custom images.
- Connect Image Builder to your existing virtual networks: you can connect to existing configuration servers (DSC, Chef, Puppet, and so on), file shares, or any other routable servers/services.
- Integrate with the Azure Shared Image Gallery: lets you distribute images globally, manage versions, and scale, and gives you an image management system.
- Integrate with existing image build pipelines: call Image Builder from your pipeline, or use the simple preview Image Builder Azure DevOps task.
- Migrate an existing image customization pipeline to Azure: use your existing scripts, commands, and processes to customize images.
- Create images in VHD format to support Azure Stack.

## <a name="regions"></a>Regions

The Azure Image Builder service is available in preview in the following regions. Images can be distributed outside of these regions.

- East US
- East US 2
- West Central US
- West US
- West US 2
- North Europe
- West Europe

## <a name="os-support"></a>OS support

AIB supports the following Azure Marketplace base OS images:

- Ubuntu 18.04
- Ubuntu 16.04
- RHEL 7.6, 7.7
- CentOS 7.6, 7.7
- SLES 12 SP4
- SLES 15, SLES 15 SP1
- Windows 10 RS5 Enterprise/Enterprise multi-session/Professional
- Windows 2016
- Windows 2019

RHEL ISO support is no longer available.

## <a name="how-it-works"></a>How it works

Azure Image Builder is a fully managed Azure service that is accessible through an Azure resource provider. The Azure Image Builder process has three main parts: source, customize, and distribute, and these are represented in a template. The diagram below shows the components and some of their properties.

**Image Builder process**

![Conceptual drawing of the Azure Image Builder process](./media/image-builder-overview/image-builder-process.png)

1. Create the image template as a .json file. This .json file contains information about the image source, the customizations, and the distribution. There are multiple examples in the [Azure Image Builder GitHub repository](https://github.com/danielsollondon/azvmimagebuilder/tree/master/quickquickstarts).
1. Submit it to the service; this creates an image template artifact in the resource group you specify. In the background, Image Builder downloads the source image or ISO and the scripts as needed. These resources are stored in a separate resource group that is automatically created in your subscription, in the format: IT_\<DestinationResourceGroup>_\<TemplateName>.
1. Once the image template is created, you can build the image. In the background, Image Builder uses the template and the source files to create a VM (default size: Standard_D1_v2), network, public IP, NSG, and storage in the IT_\<DestinationResourceGroup>_\<TemplateName> resource group.
1. As part of the image creation, Image Builder distributes the image according to the template, then deletes the additional resources in the IT_\<DestinationResourceGroup>_\<TemplateName> resource group that was created for the process.

## <a name="permissions"></a>Permissions

When you register for AIB, the AIB service is granted permission to create, manage, and delete a staging resource group (IT_*), and is granted rights to add the resources to it that are required for the image build. This is done by an AIB service principal name (SPN) being made available in your subscription during a successful registration.

To allow Azure VM Image Builder to distribute images to either managed images or to a Shared Image Gallery, you need to create an Azure user-assigned identity that has permissions to read and write images. If you are accessing Azure Storage, it needs permissions to read private containers.

Initially, you must follow the [Azure user-assigned managed identity](../active-directory/managed-identities-azure-resources/how-to-manage-ua-identity-cli.md) documentation on how to create an identity. Once you have the identity that needs to be granted permissions, you can use an Azure custom role definition and then assign the user-assigned managed identity to use that custom role definition. The permissions are explained in more detail [here](https://github.com/danielsollondon/azvmimagebuilder/blob/master/aibPermissions.md#azure-vm-image-builder-permissions-explained-and-requirements), with examples showing how to implement them.

> [!Note]
> Previously with AIB, you would have used the AIB SPN and granted the SPN permissions to the image resource groups. We are moving away from this model to allow for future capabilities. From May 26, 2020, Image Builder will not accept templates without a user-assigned identity, and existing templates will need to be resubmitted to the service with a [user identity](./linux/image-builder-json.md). The examples here show how to create a user-assigned identity and add it to a template. For more information, review this [documentation](https://github.com/danielsollondon/azvmimagebuilder#service-updates-and-latest-release-information) for these changes and release updates.

## <a name="costs"></a>Costs

You will incur some compute, networking, and storage costs when creating, building, and storing images with Azure Image Builder. These costs are similar to the costs incurred when manually creating custom images. For the resources, you will be billed at your Azure rates.

During the image creation process, files are downloaded and stored in the `IT_<DestinationResourceGroup>_<TemplateName>` resource group, which will incur a small storage cost. If you do not want to keep these, delete the **image template** after the image build.

Image Builder creates a VM using a D1v2 VM size, and the storage and networking the VM needs. These resources last for the duration of the build process and are deleted once Image Builder has finished creating the image.

Azure Image Builder distributes the image to your chosen regions, which might incur network egress charges.

## <a name="hyper-v-generation"></a>Hyper-V generation

Image Builder currently only natively supports creating Hyper-V generation 1 (Gen1) images to an Azure Shared Image Gallery (SIG) or managed image. If you want to create Gen2 images, you need to use a source Gen2 image and distribute to VHD. You then need to create a managed image from the VHD and inject it into the SIG as a Gen2 image.

## <a name="next-steps"></a>Next steps

To try out Azure Image Builder, see the articles for building [Linux](./linux/image-builder.md) or [Windows](./windows/image-builder.md) images.
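To make step 1 of the process above concrete, here is a heavily trimmed sketch of what an image template .json can look like. The shape (source, customize, distribute, plus the user-assigned identity discussed under Permissions) mirrors the public quickstarts linked above, but treat the API version and every value as placeholders:

```json
{
  "type": "Microsoft.VirtualMachineImages/imageTemplates",
  "apiVersion": "2020-02-14",
  "location": "<region>",
  "identity": {
    "type": "UserAssigned",
    "userAssignedIdentities": { "<user-assigned-identity-resource-id>": {} }
  },
  "properties": {
    "source": { "type": "PlatformImage", "publisher": "Canonical", "offer": "UbuntuServer", "sku": "18.04-LTS", "version": "latest" },
    "customize": [ { "type": "Shell", "name": "setup", "inline": [ "sudo apt-get update" ] } ],
    "distribute": [ { "type": "ManagedImage", "imageId": "<managed-image-resource-id>", "runOutputName": "myImage", "location": "<region>" } ]
  }
}
```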
53.3
410
0.740918
kor_Hang
1.00001
dae1783cc49665ebedfc51236e8c9301046fbdd1
10,060
md
Markdown
docs/versioned_docs/version-4.2/api/classes/entitymetadata.md
dvlalex/mikro-orm
f3beb7f8ceb943309ed35075e1a021627cf7634e
[ "MIT" ]
1
2020-12-27T16:40:48.000Z
2020-12-27T16:40:48.000Z
docs/versioned_docs/version-4.2/api/classes/entitymetadata.md
dvlalex/mikro-orm
f3beb7f8ceb943309ed35075e1a021627cf7634e
[ "MIT" ]
null
null
null
docs/versioned_docs/version-4.2/api/classes/entitymetadata.md
dvlalex/mikro-orm
f3beb7f8ceb943309ed35075e1a021627cf7634e
[ "MIT" ]
1
2021-01-23T02:10:54.000Z
2021-01-23T02:10:54.000Z
--- id: "entitymetadata" title: "Class: EntityMetadata<T, T>" sidebar_label: "EntityMetadata" --- ## Type parameters Name | Type | Default | ------ | ------ | ------ | `T` | [AnyEntity](../index.md#anyentity)&#60;T> | any | `T` | [AnyEntity](../index.md#anyentity)&#60;T> | any | ## Hierarchy * **EntityMetadata** ## Constructors ### constructor \+ **new EntityMetadata**(`meta?`: Partial&#60;[EntityMetadata](entitymetadata.md)>): [EntityMetadata](entitymetadata.md) *Defined in [packages/core/src/typings.ts:180](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L180)* #### Parameters: Name | Type | Default value | ------ | ------ | ------ | `meta` | Partial&#60;[EntityMetadata](entitymetadata.md)> | {} | **Returns:** [EntityMetadata](entitymetadata.md) ## Properties ### abstract • **abstract**: boolean *Defined in [packages/core/src/typings.ts:291](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L291)* ___ ### class • **class**: [Constructor](../index.md#constructor)&#60;T> *Defined in [packages/core/src/typings.ts:290](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L290)* ___ ### className • **className**: string *Defined in [packages/core/src/typings.ts:264](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L264)* ___ ### collection • **collection**: string *Defined in [packages/core/src/typings.ts:274](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L274)* ___ ### comment • `Optional` **comment**: string *Defined in [packages/core/src/typings.ts:294](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L294)* ___ ### comparableProps • **comparableProps**: [EntityProperty](../interfaces/entityproperty.md)&#60;T>[] *Defined in [packages/core/src/typings.ts:283](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L283)* ___ ### compositePK • **compositePK**: boolean *Defined in [packages/core/src/typings.ts:277](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L277)* ___ ### constructorParams • **constructorParams**: string[] *Defined in [packages/core/src/typings.ts:271](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L271)* ___ ### customRepository • **customRepository**: () => [Constructor](../index.md#constructor)&#60;[EntityRepository](entityrepository.md)&#60;T>> *Defined in [packages/core/src/typings.ts:287](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L287)* ___ ### discriminatorColumn • `Optional` **discriminatorColumn**: string *Defined in [packages/core/src/typings.ts:267](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L267)* ___ ### discriminatorMap • `Optional` **discriminatorMap**: [Dictionary](../index.md#dictionary)&#60;string> *Defined in [packages/core/src/typings.ts:269](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L269)* ___ ### discriminatorValue • `Optional` **discriminatorValue**: string *Defined in [packages/core/src/typings.ts:268](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L268)* ___ ### embeddable • **embeddable**: boolean *Defined in [packages/core/src/typings.ts:270](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L270)* ___ ### extends • **extends**: string *Defined in 
[packages/core/src/typings.ts:273](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L273)* ___ ### filters • **filters**: [Dictionary](../index.md#dictionary)&#60;[FilterDef](../index.md#filterdef)&#60;T>> *Defined in [packages/core/src/typings.ts:293](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L293)* ___ ### hooks • **hooks**: Partial&#60;Record&#60;keyof *typeof* EventType, string & keyof T[]>> *Defined in [packages/core/src/typings.ts:288](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L288)* ___ ### hydrateProps • **hydrateProps**: [EntityProperty](../interfaces/entityproperty.md)&#60;T>[] *Defined in [packages/core/src/typings.ts:284](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L284)* ___ ### indexes • **indexes**: { name?: string ; options?: [Dictionary](../index.md#dictionary) ; properties: keyof T & string \| keyof T & string[] ; type?: string }[] *Defined in [packages/core/src/typings.ts:285](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L285)* ___ ### name • `Optional` **name**: string *Defined in [packages/core/src/typings.ts:263](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L263)* ___ ### path • **path**: string *Defined in [packages/core/src/typings.ts:275](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L275)* ___ ### pivotTable • **pivotTable**: boolean *Defined in [packages/core/src/typings.ts:266](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L266)* ___ ### primaryKeys • **primaryKeys**: keyof T & string[] *Defined in [packages/core/src/typings.ts:276](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L276)* ___ ### properties • **properties**: {} *Defined in [packages/core/src/typings.ts:280](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L280)* ___ ### propertyOrder • `Readonly` **propertyOrder**: Map&#60;string, number> = new Map&#60;string, number>() *Defined in [packages/core/src/typings.ts:180](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L180)* ___ ### props • **props**: [EntityProperty](../interfaces/entityproperty.md)&#60;T>[] *Defined in [packages/core/src/typings.ts:281](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L281)* ___ ### prototype • **prototype**: T *Defined in [packages/core/src/typings.ts:289](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L289)* ___ ### readonly • `Optional` **readonly**: boolean *Defined in [packages/core/src/typings.ts:296](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L296)* ___ ### relations • **relations**: [EntityProperty](../interfaces/entityproperty.md)&#60;T>[] *Defined in [packages/core/src/typings.ts:282](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L282)* ___ ### root • **root**: [EntityMetadata](entitymetadata.md)&#60;T> *Defined in [packages/core/src/typings.ts:297](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L297)* ___ ### selfReferencing • `Optional` **selfReferencing**: boolean *Defined in [packages/core/src/typings.ts:295](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L295)* ___ ### 
serializedPrimaryKey • **serializedPrimaryKey**: keyof T & string *Defined in [packages/core/src/typings.ts:279](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L279)* ___ ### tableName • **tableName**: string *Defined in [packages/core/src/typings.ts:265](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L265)* ___ ### toJsonParams • **toJsonParams**: string[] *Defined in [packages/core/src/typings.ts:272](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L272)* ___ ### uniques • **uniques**: { name?: string ; options?: [Dictionary](../index.md#dictionary) ; properties: keyof T & string \| keyof T & string[] }[] *Defined in [packages/core/src/typings.ts:286](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L286)* ___ ### useCache • **useCache**: boolean *Defined in [packages/core/src/typings.ts:292](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L292)* ___ ### versionProperty • **versionProperty**: keyof T & string *Defined in [packages/core/src/typings.ts:278](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L278)* ## Methods ### addProperty ▸ **addProperty**(`prop`: [EntityProperty](../interfaces/entityproperty.md)&#60;T>, `sync?`: boolean): void *Defined in [packages/core/src/typings.ts:193](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L193)* #### Parameters: Name | Type | Default value | ------ | ------ | ------ | `prop` | [EntityProperty](../interfaces/entityproperty.md)&#60;T> | - | `sync` | boolean | true | **Returns:** void ___ ### initIndexes ▸ `Private`**initIndexes**(`prop`: [EntityProperty](../interfaces/entityproperty.md)&#60;T>): void *Defined in [packages/core/src/typings.ts:232](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L232)* #### Parameters: Name | Type | ------ | ------ | `prop` | [EntityProperty](../interfaces/entityproperty.md)&#60;T> | **Returns:** void ___ ### removeProperty ▸ **removeProperty**(`name`: string, `sync?`: boolean): void *Defined in [packages/core/src/typings.ts:203](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L203)* #### Parameters: Name | Type | Default value | ------ | ------ | ------ | `name` | string | - | `sync` | boolean | true | **Returns:** void ___ ### sync ▸ **sync**(`initIndexes?`: boolean): void *Defined in [packages/core/src/typings.ts:213](https://github.com/mikro-orm/mikro-orm/blob/8766baa31/packages/core/src/typings.ts#L213)* #### Parameters: Name | Type | Default value | ------ | ------ | ------ | `initIndexes` | boolean | false | **Returns:** void
25.994832
154
0.704871
yue_Hant
0.440541
dae189a8aac9b83503da32ecccd8c49aa1d7462b
1,308
md
Markdown
examples/SQ_Energy_Balance/README.md
AgriculturalModelExchangeInitiative/Crop2mlWorkshop
eb2962da4960874e573ae76480ec2ff43d87e6f8
[ "MIT" ]
null
null
null
examples/SQ_Energy_Balance/README.md
AgriculturalModelExchangeInitiative/Crop2mlWorkshop
eb2962da4960874e573ae76480ec2ff43d87e6f8
[ "MIT" ]
null
null
null
examples/SQ_Energy_Balance/README.md
AgriculturalModelExchangeInitiative/Crop2mlWorkshop
eb2962da4960874e573ae76480ec2ff43d87e6f8
[ "MIT" ]
null
null
null
AgriculturalModelExchangeInitiative
==============================

Model of Energy Balance of the crop growth simulation model SiriusQuality

Project Organization
------------

```
├── LICENSE.txt        <- License file
├── README.md          <- The top-level README for AMEI members using this project.
├── data/              <- data used for model simulation
├── doc/               <- Package documentation
├── test/              <- model tests for each language and platform
├── crop2ml/           <- model units and composite in crop2ml format.
│   ├── xml files
│   └── Algo/
└── src/               <- Executable source in different language and platform
    ├── pyx/
    ├── java/
    ├── py/
    ├── R/
    ├── cpp/
    ├── cs/
    ├── f90/
    ├── Bioma/
    ├── Sirius/
    ├── Simplace/
    ├── OpenAlea/
    ├── Record/
    └── DSSAT/
```

Model visualization
-------------------

![](doc/images/energy_balance2.png)

An example of transformation in OpenAlea
----------------------------------------

![](doc/images/energy_balance.png)

An example of unit test in R (Penman model unit)
------------------------------------------------

![](doc/images/test_penman.PNG)
24.222222
87
0.468654
eng_Latn
0.647949
dae1c1b33cb49e77b2c520c6e4c7339223868bb9
665
md
Markdown
src/cards/52-hashtag-selfiemotion.md
jasonalderman/thespacedeck.org
17c8b18160b00f4480e6eb9656fdc44e459f9c22
[ "MIT" ]
null
null
null
src/cards/52-hashtag-selfiemotion.md
jasonalderman/thespacedeck.org
17c8b18160b00f4480e6eb9656fdc44e459f9c22
[ "MIT" ]
null
null
null
src/cards/52-hashtag-selfiemotion.md
jasonalderman/thespacedeck.org
17c8b18160b00f4480e6eb9656fdc44e459f9c22
[ "MIT" ]
null
null
null
---
number: 52
# Putting title in single quotes, because # = comment in YAML
title: '#selfiemotion'
desc: Explore a range of emotional words or scenarios to elicit a spontaneous physical expression.
suit: movement
icon:
  src: img/cards/icon_52.svg
  link: https://thenounproject.com/icon/29576/
  creator: Claire Jones
---

# Supplies

Requires a camera or camera-phone

# Instructions

1. Pose for a selfie or group selfie.
2. The photographer will call out an emotion or scenario (e.g., fear, happiness, or first day of work) and snap the picture.
3. Repeat three times with new emotions or scenarios.

# Consider

Reading a headline from the news to elicit a response.
31.666667
123
0.762406
eng_Latn
0.993744
dae224141847969836765be6ce436f9aaba19005
5,219
md
Markdown
extern/quadriflow/README.md
rbabari/blender
6daa85f14b2974abfc3d0f654c5547f487bb3b74
[ "Naumen", "Condor-1.1", "MS-PL" ]
365
2015-02-10T15:10:55.000Z
2022-03-03T15:50:51.000Z
extern/quadriflow/README.md
rbabari/blender
6daa85f14b2974abfc3d0f654c5547f487bb3b74
[ "Naumen", "Condor-1.1", "MS-PL" ]
45
2015-01-09T15:34:20.000Z
2021-10-05T14:44:23.000Z
extern/quadriflow/README.md
rbabari/blender
6daa85f14b2974abfc3d0f654c5547f487bb3b74
[ "Naumen", "Condor-1.1", "MS-PL" ]
172
2015-01-25T15:16:53.000Z
2022-01-31T08:25:36.000Z
# QuadriFlow: A Scalable and Robust Method for Quadrangulation

Source code for the paper:

Jingwei Huang, Yichao Zhou, Matthias Niessner, Jonathan Shewchuk and Leonidas Guibas. [**QuadriFlow: A Scalable and Robust Method for Quadrangulation**](http://stanford.edu/~jingweih/papers/quadriflow/quadriflow.pdf), The Eurographics Symposium on Geometry Processing (SGP) 2018.

<!-- ## Processing Result -->
![QuadriFlow Results](https://github.com/hjwdzh/quadriflow/raw/master/img/result.jpg)

## WebGL Application

Our 3D WebGL Apps for QuadriFlow are online! Without any installation, you are able to

* [**Compare**](https://yichaozhou.com/publication/1805quadriflow/#demo) QuadriFlow with previous methods;
* [**Quadrangulate**](https://yichaozhou.com/publication/1805quadriflow/#tool) your own meshes and download the result!

## Desktop Software

The software supports cmake build for Linux/Mac/Windows systems. For Linux and Mac users, run **`sh demo.sh`** to build and try the QuadriFlow example, which converts `examples/Gargoyle_input.obj` to `examples/Gargoyle_quadriflow.obj`.

### Install

```
git clone git://github.com/hjwdzh/quadriflow
cd quadriflow
mkdir build
cd build
cmake .. -DCMAKE_BUILD_TYPE=release
make -j
```

### QuadriFlow Software

We take a manifold triangle mesh `input.obj` and generate a manifold quad mesh `output.obj`. The face number increases linearly with the resolution controlled by the user.

```
./quadriflow -i input.obj -o output.obj -f [resolution]
```

Here, the resolution is the desired number of faces in the quad mesh.

## Advanced Functions

### Min-cost Flow

By default, `quadriflow` uses the Boykov maximum flow solver from boost because it is faster. To enable the adaptive network simplex minimum-cost flow solver, you can enable the `-mcf` option:

```
./quadriflow -mcf -i input.obj -o output.obj -f [resolution]
```

### Sharp Preserving

By default, `quadriflow` does not explicitly detect and preserve the sharp edges in the model. To enable this feature, use

```
./quadriflow -sharp -i input.obj -o output.obj -f [resolution]
```

### SAT Flip Removal (Unix Only)

By default, `quadriflow` does not use the SAT solver to remove the flips in the integer offsets map. To remove the flips and guarantee a watertight result mesh, you can enable the SAT solver. First, make sure that `minisat` and `timeout` are properly installed under your `${PATH}`. The former can be done by building `3rd/MapleCOMSPS_LRB/CMakeLists.txt` and copying `minisat` to `/usr/bin`. In addition, `timeout` is included in coreutils. If you are using Mac, you can install it using homebrew:

```
brew install coreutils
export PATH="/usr/local/opt/coreutils/libexec/gnubin:$PATH"
```

You can verify that those binaries are properly installed by executing

```
which minisat
which timeout
```

After that, you can enable the SAT flip removal procedure by executing

```
./quadriflow -sat -i input.obj -o output.obj -f [resolution]
```

When using SAT flip removal, we also suggest enabling verbose logging to understand what is going on. You can build quadriflow with the following options:

```
cmake .. -DCMAKE_BUILD_TYPE=release -DBUILD_LOG=ON
```

### GUROBI Support (For Benchmark Purpose)

To use Gurobi integer programming to solve the integer offset problem, you can build QuadriFlow with

```
cmake .. -DCMAKE_BUILD_TYPE=release -DBUILD_GUROBI=ON -DBUILD_LOG=ON
```

This overrides the other solvers and should only be used for benchmarking purposes.
## External Dependencies

* Boost
* Eigen
* OpenMP (optional in CMake)
* TBB (optional in CMake)
* GUROBI (for benchmark purposes only)

## Licenses

QuadriFlow is released under the [MIT License](LICENSE.txt). For third-party dependencies,

* Boost and Lemon are released under the [Boost Software License](https://lemon.cs.elte.hu/trac/lemon/wiki/License)
* Most of Eigen is released under [MPL2](https://www.mozilla.org/en-US/MPL/2.0/FAQ/)
  * The Sparse Cholesky Decomposition algorithms are released under LGPL
  * To replace them with a Sparse LU decomposition under the more permissive MPL2 license (a little slower), enable `BUILD_FREE_LICENSE` in CMake (e.g., `-DBUILD_FREE_LICENSE=ON`).
* `pcg32.h` is released under the Apache License, Version 2.0
* `parallel_stable_sort.h` is released under the MIT License

## Authors

- [Jingwei Huang](mailto:jingweih@stanford.edu)
- [Yichao Zhou](mailto:zyc@berkeley.edu)

&copy; 2018 Jingwei Huang and Yichao Zhou. All Rights Reserved

**IMPORTANT**: If you use this software please cite the following in any resulting publication:

```
@article {10.1111:cgf.13498,
    journal = {Computer Graphics Forum},
    title = {{QuadriFlow: A Scalable and Robust Method for Quadrangulation}},
    author = {Huang, Jingwei and Zhou, Yichao and Niessner, Matthias and Shewchuk, Jonathan Richard and Guibas, Leonidas J.},
    year = {2018},
    publisher = {The Eurographics Association and John Wiley & Sons Ltd.},
    ISSN = {1467-8659},
    DOI = {10.1111/cgf.13498}
}
```

## Triangle Manifold

In case you are dealing with a triangle mesh that is not a manifold, we have implemented software that converts any triangle mesh to a watertight manifold. Please visit https://github.com/hjwdzh/Manifold for details.
38.659259
279
0.754934
eng_Latn
0.968294
dae2bba84aa940b32cfcee689fe9a6c4ba35f0c0
3,046
md
Markdown
Patch_wang/Notes/X.md
dengV/wang_yu_patch
3abbaa6714fb0d73a395a3eb3ace36298f0cf0f8
[ "MIT" ]
null
null
null
Patch_wang/Notes/X.md
dengV/wang_yu_patch
3abbaa6714fb0d73a395a3eb3ace36298f0cf0f8
[ "MIT" ]
null
null
null
Patch_wang/Notes/X.md
dengV/wang_yu_patch
3abbaa6714fb0d73a395a3eb3ace36298f0cf0f8
[ "MIT" ]
null
null
null
## How do you ship remote code patches on iOS?

Most people just use JSPatch, right? But relying on someone else's tool isn't ideal. Old Wang built his own wheel, and I'm here to promote it.

##### Old Wang's Patch is fairly advanced: JavaScript code, bundled with webpack. It also borrows ideas from JSPatch. So how do you design a patch system like this?

### Start from the principle

What is a patch actually for? In a big company, an app runs very complex business logic, and problems can show up while it is running in production.

### That's when a patch comes in handy

You can push a file from the server, and the shipped app loads that file and executes the code patch.

#### This way, while the shipped app is running, code can be delivered remotely and executed. Objective-C can do this because it is a dynamic language with a runtime, which is what makes patching possible at all.

Objective-C has its own message forwarding flow. The runtime has `objc_msgSend` and `_objc_msgForward`, and because of how these two functions work, every method call goes through them.

##### That is what lets you do patch-like things.

### The other requirement: the language the embedded patch uses must be able to be eval'ed.

### eval: the ability to execute code from a string

So the app holds a context that can execute the patch language.

<hr>

The iOS patch approach in this article uses JavaScriptCore directly — specifically the JSContext that JavaScriptCore provides. It gives JavaScript and Objective-C a bridge, so the two languages can call each other. (Implemented by Apple itself.)

#### JSPatch uses FFI, and Old Wang's Patch uses FFI too.

FFI, Foreign Function Interface: exposing one language's interface so that another language can call it. Java's JNI standard is quite similar to FFI.

FFI first follows a coding convention — it defines a set of calling protocols and rules.

Why do we need FFI?

#### An example 🌰:

When we make a function call, a stack frame is set up first. What arguments are passed in that frame? What are their types? How many are there? And what does the function actually do when it runs? FFI writes down the calling protocols and rules that must be followed.

<hr>

The custom FFI in Old Wang's Patch library connects the patch language to the Objective-C execution environment. It specifies the commands that need to be passed in.

#### That way, while the patch executes, the right messages can be sent according to those commands, and the right context executes the code being patched.

#### That's it for FFI. On to precompilation.

While using JSPatch, Old Wang found it very awkward to work with. Why? In Old Wang's view, JSPatch force-converts JavaScript into patch code written in what is essentially an Objective-C style. Along the way it does a lot of language processing, including the vararg-function handling that Bang described, C function support, and something like Ruby's method_missing. Everything beyond that, in Old Wang's view, JSPatch handles on the back end.

### JavaScript fidelity

The patch should keep the smoothness of the language itself: when you write JavaScript, you should get to write it the JavaScript way.

### That leads to the precompilation step:

The JavaScriptContext registration interface is abstracted into three layers.

#### define

define sends a message to Objective-C. If no return value is needed, you can use define. Anything that needs defining is something you define yourself, so abstracting this one layer is enough.

#### evaluate

Used when a return value is needed — to run some task and get a result back.

#### Callback

Old Wang suspects Apple's JavaScriptContext wrapper has some rough edges. For example, if a function object you pass in has been wrapped two layers deep, you may no longer be able to get at the underlying object. In that case you need a callback mechanism to fetch the front-end function's content from the execution environment — a JavaScriptCore-style callback that retrieves the value of a function in the current JavaScriptContext.

#### Then there are the instruction types

What does this patch actually do? What instructions does it offer?

* A patch hooks some methods.
* It changes the values of some properties.
* It makes changes around blocks.
* It accesses the superclass, super.
* It can add new methods: method_create.

Through these instructions, we can implement the patching features we want.

#### There is also keyword rewriting, implemented during preprocessing. Some keywords have to be handled specially.

In Objective-C, the self keyword usually stands for the pointer to the current object. super also has to be rewritten, to oc_super, because super is a keyword in ECMAScript 6 as well and must be avoided. original is specific to this Patch: it calls the previous function — the function as it was before the patch was applied.

#### Some refinements, for example 🌰: writing higher-order functions in JS

Higher-order functions in JavaScript get a smoothing pass; otherwise they could be very painful to write.

```
a.request(
    function(a:id,b:Int):double{},
    callback:(string,string) => int,
    (num1:string, num2:double) => {
        return num1 + num2;
    }
);
```

The request method takes three parameters, all of function type. The first function takes two arguments. The second parameter, callback, is obtained from the calling context of the function. The third is an arrow function as used in ECMAScript 6.

Ideally, all of these are supported. Through preprocessing, these constructs are compiled into instructions that can bridge to Objective-C blocks — only then does everything work properly.

#### JSPatch is a great reference.

Everyone imitates JSPatch and registers a pile of functions they want to patch into the JavaScriptContext, adding whatever they need as they go. Generally they borrow Bang's ideas wholesale.

#### What would be better?

To abstract out the ideas behind JSPatch — the things underneath it.

### Drawbacks: it doesn't use the rather handy JSExport protocol, or JSVirtualMachine.

GitHub official repo: https://github.com/wangyunico/iOSPatchBackend
My unofficial repo,
23.612403
122
0.747866
yue_Hant
0.919962
dae2ed12ecb267c95104c3c6f9985eb1fad0081f
48
md
Markdown
CONTRIBUTING.md
mingo2000/bright-spoon
9d8158e384415c4a3ca8d93f6aa266f87a665cd0
[ "MIT" ]
null
null
null
CONTRIBUTING.md
mingo2000/bright-spoon
9d8158e384415c4a3ca8d93f6aa266f87a665cd0
[ "MIT" ]
4
2019-07-30T06:39:02.000Z
2019-07-30T10:32:54.000Z
CONTRIBUTING.md
mingo2000/bright-spoon
9d8158e384415c4a3ca8d93f6aa266f87a665cd0
[ "MIT" ]
null
null
null
# Welcome

Thanks for contributing to this project.
16
37
0.8125
eng_Latn
0.9806
dae2f934e65835ecb54acc11fd60af3926599f2f
130
md
Markdown
README.md
blackboxlaw/eth-warrants
9eaf0af8811dd9737106d70e9ab464b4307929e9
[ "MIT" ]
null
null
null
README.md
blackboxlaw/eth-warrants
9eaf0af8811dd9737106d70e9ab464b4307929e9
[ "MIT" ]
null
null
null
README.md
blackboxlaw/eth-warrants
9eaf0af8811dd9737106d70e9ab464b4307929e9
[ "MIT" ]
null
null
null
# exec-remuneration

Exchange for services or value.

![alt text](http://onelaw.us/images/2020/logos-black/logo-blk-Warrants.png)
21.666667
75
0.761538
eng_Latn
0.59419
dae30cfbdd7c9dc42cc30474d673229bc1d419e8
760
md
Markdown
chapters/chapter09/README.md
jcscottiii/IntroToGithub
46db9aec7ff8789262a8d0f14e8cbcd392840a9a
[ "CC0-1.0" ]
null
null
null
chapters/chapter09/README.md
jcscottiii/IntroToGithub
46db9aec7ff8789262a8d0f14e8cbcd392840a9a
[ "CC0-1.0" ]
null
null
null
chapters/chapter09/README.md
jcscottiii/IntroToGithub
46db9aec7ff8789262a8d0f14e8cbcd392840a9a
[ "CC0-1.0" ]
null
null
null
# Clumsy Bird ![Clumsy Bird Screenshot](https://raw.githubusercontent.com/jcscottiii/IntroToGithubMaterials/master/img/clumsy-bird.png) ## Goal: Create your own version on your own website No coding experience needed. Just changing values. ## Fork the repository Go to [https://github.com/jcscottiii/clumsy-bird](https://github.com/jcscottiii/clumsy-bird) Click the `Fork` button at the top right. You need to make a change to deploy your own version. Check out this file: - `js/entities/entities.js` Look for phrases like: - `gravity` - `gravityForce` ## Competition - Show me your computer with your high score on your modified game. - Show me your code that you changed. - **ONE** Submission Restriction: Do **NOT** change the starting score.
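For reference, the kind of value change step 2 above asks for looks roughly like this. This is a hypothetical illustration only — confirm the real property names and current values by searching `js/entities/entities.js` for `gravity` / `gravityForce`:

```js
// Hypothetical illustration — check js/entities/entities.js for the real names and values.
// A smaller gravity value makes the bird floatier; a bigger one makes it drop faster.
this.gravityForce = 0.01;   // assumed per-entity gravity force: try halving or doubling it
```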
23.030303
121
0.752632
eng_Latn
0.919457
dae3a3aa07bfce28d154897bf5b7190459a19030
5,388
md
Markdown
articles/azure-cache-for-redis/cache-python-get-started.md
changeworld/azure-docs.nl-nl
bdaa9c94e3a164b14a5d4b985a519e8ae95248d5
[ "CC-BY-4.0", "MIT" ]
null
null
null
articles/azure-cache-for-redis/cache-python-get-started.md
changeworld/azure-docs.nl-nl
bdaa9c94e3a164b14a5d4b985a519e8ae95248d5
[ "CC-BY-4.0", "MIT" ]
null
null
null
articles/azure-cache-for-redis/cache-python-get-started.md
changeworld/azure-docs.nl-nl
bdaa9c94e3a164b14a5d4b985a519e8ae95248d5
[ "CC-BY-4.0", "MIT" ]
null
null
null
---
title: 'Quickstart: Create a Python app - Azure Cache for Redis'
description: In this quickstart, you learn how to create a Python app that uses Azure Cache for Redis.
author: yegu-ms
ms.author: yegu
ms.service: cache
ms.devlang: python
ms.topic: quickstart
ms.custom:
- mvc
- seo-python-october2019
ms.date: 11/05/2019
ms.openlocfilehash: 7b05f12ad3fd3a0f56605d708bbbf06df7e341ed
ms.sourcegitcommit: f4f626d6e92174086c530ed9bf3ccbe058639081
ms.translationtype: MT
ms.contentlocale: nl-NL
ms.lasthandoff: 12/25/2019
ms.locfileid: "75433469"
---
# <a name="quickstart-create-a-python-app-that-uses-azure-cache-for-redis"></a>Quickstart: Create a Python app that uses Azure Cache for Redis

In this article, you incorporate Azure Cache for Redis into a Python app to get access to a secure, dedicated cache that is accessible from any application within Azure.

## <a name="prerequisites"></a>Prerequisites

- Azure subscription: [create one for free](https://azure.microsoft.com/free/)
- [Python 2 or 3](https://www.python.org/downloads/)

## <a name="create-an-azure-cache-for-redis-instance"></a>Create an Azure Cache for Redis instance
[!INCLUDE [redis-cache-create](../../includes/redis-cache-create.md)]

[!INCLUDE [redis-cache-create](../../includes/redis-cache-access-keys.md)]

## <a name="install-redis-py"></a>Install redis-py

[Redis-py](https://github.com/andymccurdy/redis-py) is a Python interface to Azure Cache for Redis. Use the Python packages tool, *pip*, to install the *redis-py* package from a command prompt. The following example uses *pip3* for Python 3 to install *redis-py* on Windows 10 from an administrator command prompt.

![Install the redis-py Python interface to Azure Cache for Redis](./media/cache-python-get-started/cache-python-install-redis-py.png)

## <a name="read-and-write-to-the-cache"></a>Read and write to the cache

Run Python from the command line and test your cache by using the following code. Replace `<Your Host Name>` and `<Your Access Key>` with the values from your Azure Cache for Redis instance. Your host name is of the form *\<DNS name>.redis.cache.windows.net*.

```python
>>> import redis
>>> r = redis.StrictRedis(host='<Your Host Name>',
        port=6380, db=0, password='<Your Access Key>', ssl=True)
>>> r.set('foo', 'bar')
True
>>> r.get('foo')
b'bar'
```

> [!IMPORTANT]
> For Azure Cache for Redis version 3.0 or higher, TLS/SSL certificate checking is enforced. ssl_ca_certs must be explicitly set when connecting to Azure Cache for Redis. For RedHat Linux, ssl_ca_certs are in the */etc/pki/tls/certs/ca-bundle.crt* certificate module.

## <a name="create-a-python-sample-app"></a>Create a Python sample app

Create a new text file, add the following script, and save the file as *PythonApplication1.py*. Replace `<Your Host Name>` and `<Your Access Key>` with the values from your Azure Cache for Redis instance. Your host name is of the form *\<DNS name>.redis.cache.windows.net*.
```python
import redis

myHostname = "<Your Host Name>"
myPassword = "<Your Access Key>"

r = redis.StrictRedis(host=myHostname, port=6380, password=myPassword, ssl=True)

result = r.ping()
print("Ping returned : " + str(result))

result = r.set("Message", "Hello!, The cache is working with Python!")
print("SET Message returned : " + str(result))

result = r.get("Message")
print("GET Message returned : " + result.decode("utf-8"))

result = r.client_list()
print("CLIENT LIST returned : ")
for c in result:
    print("id : " + c['id'] + ", addr : " + c['addr'])
```

Run *PythonApplication1.py* with Python. You should see results like the following example:

![Run the Python script to test cache access](./media/cache-python-get-started/cache-python-completed.png)

## <a name="clean-up-resources"></a>Clean up resources

If you're finished with the Azure resource group and the resources you created in this quickstart, you can delete them to avoid charges.

> [!IMPORTANT]
> Deleting a resource group is irreversible, and the resource group and all the resources in it are permanently deleted. If you created your Azure Cache for Redis instance in an existing resource group that you want to keep using, you can delete just the cache by selecting **Delete** on the cache **Overview** page.

To delete the resource group and its Azure Cache for Redis instance:

1. In the [Azure portal](https://portal.azure.com), search for and select **Resource groups**.
1. In the **Filter by name** text box, enter the name of the resource group that contains your cache instance, and then select it from the search results.
1. On the resource group page, select **Delete resource group**.
1. Type the name of the resource group, and then select **Delete**.

![Delete your resource group for Azure Cache for Redis](./media/cache-python-get-started/delete-your-resource-group-for-azure-cache-for-redis.png)

## <a name="next-steps"></a>Next steps

> [!div class="nextstepaction"]
> [Create a simple ASP.NET web app that uses Azure Cache for Redis.](./cache-web-app-howto.md)
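Following up on the TLS note earlier in this quickstart: a minimal sketch of passing `ssl_ca_certs` explicitly. The bundle path is the RedHat location from that note; adjust it for your distribution:

```python
import redis

r = redis.StrictRedis(
    host="<Your Host Name>",
    port=6380,
    password="<Your Access Key>",
    ssl=True,
    ssl_ca_certs="/etc/pki/tls/certs/ca-bundle.crt",  # RedHat CA bundle, per the note above
)
print(r.ping())
```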
48.107143
358
0.746102
nld_Latn
0.990811
dae3cd2109183fadd9bd20d5956a3a9878795731
632
md
Markdown
chapter_11/exercises/03/README.md
reinvanimschoot/c-programming-a-modern-approach-solutions
7aabcd763d231cc0af3a250383a97a335c24ac6c
[ "CC-BY-4.0" ]
null
null
null
chapter_11/exercises/03/README.md
reinvanimschoot/c-programming-a-modern-approach-solutions
7aabcd763d231cc0af3a250383a97a335c24ac6c
[ "CC-BY-4.0" ]
null
null
null
chapter_11/exercises/03/README.md
reinvanimschoot/c-programming-a-modern-approach-solutions
7aabcd763d231cc0af3a250383a97a335c24ac6c
[ "CC-BY-4.0" ]
null
null
null
### Exercise 03 The following function supposedly computes the sum and average of the numbers in the array `a`, which has length `n`. `avg` and `sum` point to variables that the function should modify. Unfortunately, the function contains several errors; find and correct them. ```c void avg_sum(double a[], int n, double *avg, double *sum) { int i; sum = 0.0; for (i = 0; i < n; i++) sum += a[i]; avg = sum / n; } ``` ### Solution ```c void avg_sum(double a[], int n, double *avg, double *sum) { int i; *sum = 0.0; for (i = 0; i < n; i++) *sum += a[i]; *avg = *sum / n; } ```
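As a quick sanity check (not part of the exercise), here is a minimal driver for the corrected function. Note that callers pass the *addresses* of `avg` and `sum` so the function can modify them through the pointers:

```c
#include <stdio.h>

void avg_sum(double a[], int n, double *avg, double *sum);  /* corrected version above */

int main(void)
{
    double a[] = {1.0, 2.0, 3.0, 4.0};
    double avg, sum;

    avg_sum(a, 4, &avg, &sum);                  /* pass addresses to be modified */
    printf("sum = %g, avg = %g\n", sum, avg);   /* expected: sum = 10, avg = 2.5 */
    return 0;
}
```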
18.588235
80
0.577532
eng_Latn
0.992652
dae4a20bac2ca8ff94fc85bd0055ad641db94ee8
5,833
md
Markdown
docsTools/downloaded_documents/Contrail Networking Monitoring and Troubleshooting Guide/contrail-logs-vnc.md
pjrusak/docs
20acc3ddd3184670b92662aeec3685bd69bd5247
[ "CC-BY-4.0" ]
14
2018-05-16T01:01:04.000Z
2022-03-13T06:31:11.000Z
docsTools/downloaded_documents/Contrail Networking Monitoring and Troubleshooting Guide/contrail-logs-vnc.md
pjrusak/docs
20acc3ddd3184670b92662aeec3685bd69bd5247
[ "CC-BY-4.0" ]
88
2018-09-06T18:07:28.000Z
2021-07-05T10:07:21.000Z
docsTools/downloaded_documents/Contrail Networking Monitoring and Troubleshooting Guide/contrail-logs-vnc.md
pjrusak/docs
20acc3ddd3184670b92662aeec3685bd69bd5247
[ "CC-BY-4.0" ]
22
2018-05-16T01:01:06.000Z
2022-01-06T03:41:27.000Z
# contrail-logs (Accessing Log File Messages)

<div id="intro">
<div class="mini-toc-intro">
A command-line utility, `contrail-logs`, uses REST APIs to retrieve system log messages, object log messages, and trace messages.
</div>
</div>

## Command-Line Options for Contrail-Logs

The command-line utility for accessing log file information is `contrail-logs` in the analytics node. The following are the options supported at the command line for `contrail-logs`, as viewed using the `--help` option.

<div id="jd0e45" class="example" dir="ltr">
[root@host]# contrail-logs --help
usage: contrail-logs [-h] [--opserver-ip OPSERVER_IP]
                     [--opserver-port OPSERVER_PORT]
                     [--start-time START_TIME] [--end-time END_TIME]
                     [--last LAST] [--source SOURCE]
                     [--module {ControlNode, VRouterAgent, ApiServer, Schema, OpServer, Collector, QueryEngine, ServiceMonitor, DnsAgent}]
                     [--category CATEGORY] [--level LEVEL]
                     [--message-type MESSAGE_TYPE] [--reverse] [--verbose]
                     [--all]
                     [--object {ObjectVNTable, ObjectVMTable, ObjectSITable, ObjectVRouter, ObjectBgpPeer, ObjectRoutingInstance, ObjectBgpRouter, ObjectXmppConnection, ObjectCollectorInfo, ObjectGeneratorInfo, ObjectConfigNode}]
                     [--object-id OBJECT_ID]
                     [--object-select-field {ObjectLog,SystemLog}]
                     [--trace TRACE]
</div>

## Option Descriptions

The following are the descriptions for each of the option arguments available for `contrail-logs`.

<div id="jd0e56" class="example" dir="ltr">
optional arguments:
  -h, --help            show this help message and exit
  --opserver-ip OPSERVER_IP
                        IP address of OpServer (default: 127.0.0.1)
  --opserver-port OPSERVER_PORT
                        Port of OpServer (default: 8081)
  --start-time START_TIME
                        Logs start time (format now-10m, now-1h) (default: now-10m)
  --end-time END_TIME   Logs end time (default: now)
  --last LAST           Logs from last time period (format 10m, 1d) (default: None)
  --source SOURCE       Logs from source address (default: None)
  --module {ControlNode, VRouterAgent, ApiServer, Schema, OpServer, Collector, QueryEngine, ServiceMonitor, DnsAgent}
                        Logs from module (default: None)
  --category CATEGORY   Logs of category (default: None)
  --level LEVEL         Logs of level (default: None)
  --message-type MESSAGE_TYPE
                        Logs of message type (default: None)
  --reverse             Show logs in reverse chronological order (default: False)
  --verbose             Show internal information (default: True)
  --all                 Show all logs (default: False)
  --object {ObjectVNTable, ObjectVMTable, ObjectSITable, ObjectVRouter, ObjectBgpPeer, ObjectRoutingInstance, ObjectBgpRouter, ObjectXmppConnection, ObjectCollectorInfo, ObjectGeneratorInfo, ObjectConfigNode}
                        Logs of object type (default: None)
  --object-id OBJECT_ID
                        Logs of object name (default: None)
  --object-select-field {ObjectLog,SystemLog}
                        Select field to filter the log (default: None)
  --trace TRACE         Dump trace buffer (default: None)
</div>

## Example Uses

The following examples show how you can use the option arguments available for `contrail-logs` to retrieve the information you specify.

1. View only the system log messages from all boxes for the last 10 minutes.
   `contrail-logs`
2. View all log messages (systemlog, objectlog, uve, ...) from all boxes for the last 10 minutes.
   `contrail-logs --all`
3. View only the control node system log messages from all boxes for the last 10 minutes.
   `contrail-logs --module ControlNode`
   `--module` accepts the following values - `ControlNode, VRouterAgent, ApiServer, Schema, ServiceMonitor, Collector, OpServer, QueryEngine, DnsAgent`
4. View the control node system log messages from source `a6s23.contrail.juniper.net` for the last 10 minutes.
   `contrail-logs --module ControlNode --source a6s23.contrail.juniper.net`
5. View the XMPP category system log messages from all modules on all boxes for the last 10 minutes.
   `contrail-logs --category XMPP`
6. View the system log messages from all the boxes from the last hour.
   `contrail-logs --last 1h`
7. View the system log messages from the VN object named `demo:admin:vn1` from all boxes for the last 10 minutes.
   `contrail-logs --object ObjectVNTable --object-id demo:admin:vn1`
   `--object` accepts the following values - `ObjectVNTable, ObjectVMTable, ObjectSITable, ObjectVRouter, ObjectBgpPeer, ObjectRoutingInstance, ObjectBgpRouter, ObjectXmppConnection, ObjectCollectorInfo`
8. View the system log messages from all boxes for the last 10 minutes in reverse chronological order:
   `contrail-logs --reverse`
9. View the system log messages from a specific time interval.
   `contrail-logs --start-time "2020 May 12 18:30:27.0" --end-time "2020 May 12 18:31:27.0"`
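A small convenience sketch that wraps the CLI documented above from Python; it assumes only that `contrail-logs` is on the PATH of the analytics node, and the helper name is illustrative:

```python
import subprocess

def fetch_logs(module=None, last="10m", *extra_args):
    """Return raw output from the contrail-logs utility (flags documented above)."""
    cmd = ["contrail-logs", "--last", last]
    if module:
        cmd += ["--module", module]  # e.g. ControlNode, VRouterAgent, Collector, ...
    cmd += list(extra_args)
    return subprocess.run(cmd, capture_output=True, text=True, check=True).stdout

# Example: control node messages from the last hour, newest first.
print(fetch_logs("ControlNode", "1h", "--reverse"))
```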
38.886667
233
0.606892
eng_Latn
0.746492
dae5335c20f6a527871ef4cefc663ee579b03f8c
786
md
Markdown
README.md
benfletcher/aoc-2018-ts
a261ba0fef666ecc7c1cb73a38c7fedcf7d5bdce
[ "MIT" ]
null
null
null
README.md
benfletcher/aoc-2018-ts
a261ba0fef666ecc7c1cb73a38c7fedcf7d5bdce
[ "MIT" ]
1
2018-12-12T23:30:07.000Z
2018-12-12T23:30:07.000Z
README.md
benfletcher/aoc-2018-ts
a261ba0fef666ecc7c1cb73a38c7fedcf7d5bdce
[ "MIT" ]
null
null
null
Repository for solutions to select Advent of Code 2018 problems, written primarily (solely?) in Typescript using the TDD red-green-refactor approach.

### Edit and Run

Write your program using `src/index.ts` as the entry point, and run it.

```bash
$ npm run start
```

Individual problems will be split out into standalone modules.

👮 All the tests need to pass in order to make a `git push`.

#### How to TDD:

Create a test file next to the file you want to test, using the nomenclature `<file-name>.test.ts`.

```bash
$ npm run tdd
```

Based on [TypeScript Node App Starter](https://github.com/acamica/typescript-node-starter). See its [`readme.md`](https://github.com/acamica/typescript-node-starter/blob/master/README.md) for installation instructions, list of contributors, etc.
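For example, a minimal module/test pair might look like this. This is a sketch: it assumes the starter's default Jest setup, and the `sum` module is hypothetical:

```ts
// src/sum.ts — a hypothetical module under test
export const sum = (a: number, b: number): number => a + b;

// src/sum.test.ts — name follows the <file-name>.test.ts convention above
import { sum } from './sum';

describe('sum', () => {
  it('adds two numbers', () => {
    expect(sum(1, 2)).toBe(3);
  });
});
```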
34.173913
245
0.746819
eng_Latn
0.922095
dae6ddd5197b83161aae3704003ca02726eb6734
1,093
md
Markdown
_posts/2018-05-03-faster.md
lighteningstime/lighteningsblog
8911677ea63573b3c8f92721468e6b80c68323db
[ "MIT" ]
null
null
null
_posts/2018-05-03-faster.md
lighteningstime/lighteningsblog
8911677ea63573b3c8f92721468e6b80c68323db
[ "MIT" ]
2
2019-12-25T20:04:48.000Z
2019-12-25T20:04:49.000Z
_posts/2018-05-03-faster.md
lighteningstime/lighteningstime.github.io
8911677ea63573b3c8f92721468e6b80c68323db
[ "MIT" ]
null
null
null
---
layout: post
title: Faster
date: 2018-05-03
---

Maybe it's the sugar; that can be one vector for depression. Which would corroborate the idea that sugar causes inflammation, which causes depression. Maybe that means tylenol would help if or when it ever hits again.

It has been a few days, and the feeling is starting to pass. Maybe the lack of donuts is contributing to that, maybe not. I think in the end, it's the feeling of being stuck. The depressive cloud points out that, "Hey, you're not trying things that scare you, and you've settled into a little comfort bubble." Maybe that is just what I need.

I finally started my week-long fast. Or hopefully it lasts a week. We'll break Tuesday maybe. If things go well, technically it would be Wednesday dinner to Thursday breakfast/lunch to do a full week. Shakou for sure. I can taste it already. If we feel really shitty, maybe before Wednesday's volleyball game.

Here's to a good day 1. Hunger pangs are still muted, but still present, along with the weakness and faintness. It's decent as long as I'm not exercising very hard.

Let's do this
72.866667
313
0.772187
eng_Latn
0.999879
dae6e6e14bbc785776966935cd400ce83cb9981d
75
md
Markdown
README.md
AlgoSenpai/LocalNetworkSearch
fb0a5f3167dc98711b62ebc8f6803fb06647f802
[ "Unlicense" ]
null
null
null
README.md
AlgoSenpai/LocalNetworkSearch
fb0a5f3167dc98711b62ebc8f6803fb06647f802
[ "Unlicense" ]
null
null
null
README.md
AlgoSenpai/LocalNetworkSearch
fb0a5f3167dc98711b62ebc8f6803fb06647f802
[ "Unlicense" ]
null
null
null
# LocalNetworkSearch Search the computers connected to your local network.
25
53
0.84
eng_Latn
0.999301
dae6f6012a433aa102ef9aa08138dac49e22133e
789
md
Markdown
.github/ISSUE_TEMPLATE.md
oblanchet/sol2
6283d3c2be81ef222318f1d463b69b5f90d6dea5
[ "MIT" ]
3,193
2016-01-28T16:23:41.000Z
2022-03-31T17:58:23.000Z
.github/ISSUE_TEMPLATE.md
oblanchet/sol2
6283d3c2be81ef222318f1d463b69b5f90d6dea5
[ "MIT" ]
1,268
2016-02-01T09:00:51.000Z
2022-03-27T21:38:53.000Z
.github/ISSUE_TEMPLATE.md
oblanchet/sol2
6283d3c2be81ef222318f1d463b69b5f90d6dea5
[ "MIT" ]
453
2016-02-24T15:34:51.000Z
2022-03-30T20:18:56.000Z
Read these guidelines. They are relatively simple and will allow me to help you better!

For Error Reports:

1. Produce a simple, short, compilable test case that reproduces your problem.
2. Make a descriptive title that summarises the bug as a whole.
3. Explain the bug in as much detail as you can in the body of the issue.
4. Include Compiler/IDE (Visual Studio, XCode...), Build and Deployment System, Language (C++, Objective-C++), and any special defines you have set.

If you want to request a feature:

1. Produce any relevant imaginary code that illustrates what you would like or desired behavior.
2. Include a description and title of what you would like.
3. Annotate and describe the behavior through comments, asserts or just a small write up.

Thanks for helping sol2 grow!
46.411765
148
0.774398
eng_Latn
0.999795
dae7d990e58e54833a00b0cfd0dc7efba74bc4d4
2,698
md
Markdown
README.md
reviewed/feralchimp
ed818cc45a1e3c41f0fee173529fbeb56658f0d3
[ "MIT" ]
null
null
null
README.md
reviewed/feralchimp
ed818cc45a1e3c41f0fee173529fbeb56658f0d3
[ "MIT" ]
null
null
null
README.md
reviewed/feralchimp
ed818cc45a1e3c41f0fee173529fbeb56658f0d3
[ "MIT" ]
null
null
null
# Feralchimp

Feralchimp is a Ruby based API wrapper for the MailChimp API v2.0.

## Installation:

```ruby
gem "feralchimp", github: "reviewed/feralchimp"
```

## Options:

* Feralchimp.api_key = Mailchimp key w/ region part.
  * *You can also optionally set ENV["MAILCHIMP_API_KEY"] too*
* Feralchimp.timeout = *Defaults:* 5

There is one setter called `exportar` (Spanish for export) that is a public but private API method so that the class can communicate with the instance when a user chains using `export`. This variable is always reset back to false each time `#call` is called. While it won't hurt anything if you play with it (such as setting it to true), be warned that it's internal, that its state is always reset even if it's already false, and that setting it to any value but false or nil will just result in you hitting the Export API.

## Normal API Usage:

```ruby
Feralchimp.api_key = api_key
Feralchimp.new.lists #=> {}
Feralchimp.lists #=> {}
Feralchimp.new(:api_key => api_key).lists # => {}
```

Using the class creates a new instance each run, but you also have the option to create your own persistent instance so you can control key state. When creating a new instance you can send an optional API key which will be set for that instance only.

## Export API Usage:

```ruby
Feralchimp.new.export.list(:id => list_id) #=> [{}]
Feralchimp.export.list(:id => list_id) #=> [{}]
Feralchimp.new(:api_key => api_key).export.list(:id => list_id)
```

According to the Mailchimp spec it will send a plain-text list of JSON arrays delimited by `\n`, with the first JSON array being the header (AKA the hash keys). In keeping with this, we actually parse this list for you, in that we take the first JSON array and zip it into an array, like so:

```ruby
# What Mailchimp gives us:
# ["header1", "header2"]
# ["array1_v1", "array1_v2"]
# ["array2_v1", "array2_v2"]

# What we give you:
[
  {"header1" => "array1_v1", "header2" => "array1_v2"},
  {"header1" => "array2_v1", "header2" => "array2_v2"}
]
```

This means that to work with the Export API you need do nothing more special than you already do, because we handle all the hard work, if you can call it hard work considering it required very little extra code.

## API Payloads

```ruby
Feralchimp.new.list_members(:id => list_id)
Feralchimp.list_members(:id => list_id)
Feralchimp.new(:api_key => api_key).list_members(:id => list_id)
```

Feralchimp accepts a hash-based payload. This payload is not tracked by us and all we do is transform it and post it, so if you would like to know more about what payloads you might need to send to Mailchimp please visit the [Mailchimp API docs](http://apidocs.mailchimp.com/api/2.0/).
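Purely for illustration, the header-zip transform described above can be reproduced in a few lines of plain Ruby. This is not Feralchimp's internal code, just a sketch of what the library does for you:

```ruby
# Rows as Mailchimp's Export API returns them, after JSON-parsing each line.
rows = [
  ["header1", "header2"],
  ["array1_v1", "array1_v2"],
  ["array2_v1", "array2_v2"]
]

# The first row is the header; zip it against every remaining row.
header, *values = rows
records = values.map { |row| header.zip(row).to_h }
# => [{"header1"=>"array1_v1", "header2"=>"array1_v2"},
#     {"header1"=>"array2_v1", "header2"=>"array2_v2"}]
```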
40.878788
518
0.73017
eng_Latn
0.994566
dae85ebae6a4f19494b464e723beb81fe16e8921
1,244
md
Markdown
README.md
devWRM/million_dollar_idea
a9d08b4e301f6c9d9b10b2fb63da7da1547ac5ee
[ "MIT" ]
null
null
null
README.md
devWRM/million_dollar_idea
a9d08b4e301f6c9d9b10b2fb63da7da1547ac5ee
[ "MIT" ]
null
null
null
README.md
devWRM/million_dollar_idea
a9d08b4e301f6c9d9b10b2fb63da7da1547ac5ee
[ "MIT" ]
null
null
null
## Description

Million Dollar Idea is for people who want to stop dreaming and actually start pursuing their ideas. You can have a million ideas, but you only need one idea to make millions. Use Million Dollar Idea to capture, track, and plan to make your idea become a million dollar success.

Million Dollar Idea allows users to:

- signup
- login
- create ideas
- add resources to their ideas
- see a list of all of their idea titles
- see details of a specific idea by clicking on the idea title link
- edit their own ideas
- delete their own ideas
- see all categories
- see all categories they've created ideas for
- see all resources for each of their ideas
- see ideas, resources, and categories for other users

## Usage

Visit the website on Heroku here: [link coming soon]

## Contributing

Bug reports and pull requests are welcome on GitHub at https://github.com/devWRM/million_dollar_idea. This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [code of conduct](https://github.com/devWRM/quote_generator/blob/master/CODE_OF_CONDUCT.md).

## License

Open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).
36.588235
316
0.77492
eng_Latn
0.997161
dae8b80be101487e6b9e2033ff8579ba9b162d99
793
md
Markdown
README.md
sdq/rrule.swift
0466cbce95f8e66795b4f020eaa200bfafbd2460
[ "MIT" ]
46
2016-07-19T02:27:39.000Z
2022-01-27T04:24:32.000Z
README.md
sdq/rrule.swift
0466cbce95f8e66795b4f020eaa200bfafbd2460
[ "MIT" ]
4
2016-11-10T16:55:51.000Z
2020-10-21T09:21:25.000Z
README.md
sdq/rrule.swift
0466cbce95f8e66795b4f020eaa200bfafbd2460
[ "MIT" ]
15
2016-07-21T17:41:00.000Z
2022-03-23T08:05:53.000Z
# rrule.swift

**rrule.swift** supports recurrence rules in Swift 4 (no other 3rd-party dependencies). It is a partial port of the rrule module from the excellent [python-dateutil](http://labix.org/python-dateutil/) library.

Demo
------

![demo](https://github.com/sdq/rrule.swift/blob/master/rruledemo.jpg)

How to use
------

Drag **rrule.swift** into your project.

    let rule = rule(frequency, dtstart: dtstart, until: until, count: count, interval: interval, wkst: wkst, bysetpos: bysetpos, bymonth: bymonth, bymonthday: bymonthday, byyearday: byyearday, byweekno: byweekno, byweekday: byweekday)
    let occurrences = rule.getOccurrences()

To do
------

* Hourly
* Minutely
* Secondly

Author
------

[sdq](http://shidanqing.net)

License
-------

[MIT](https://opensource.org/licenses/MIT)
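As a hedged sketch only, a weekly rule over one month might be built as below. The argument labels follow the call shape shown above, but the concrete types (e.g. how `frequency` is expressed, or whether unused parameters accept `nil`) are assumptions; check the actual initializer in rrule.swift.

    // Sketch: a weekly recurrence from now until one month from now.
    let dtstart = Date()
    let until = Calendar.current.date(byAdding: .month, value: 1, to: dtstart)!

    // "WEEKLY" as the frequency and nil for unused options are assumptions.
    let weekly = rule("WEEKLY", dtstart: dtstart, until: until, count: nil,
                      interval: 1, wkst: nil, bysetpos: nil, bymonth: nil,
                      bymonthday: nil, byyearday: nil, byweekno: nil,
                      byweekday: nil)
    let occurrences = weekly.getOccurrences()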
25.580645
231
0.718789
eng_Latn
0.838187
dae8bf5daa10bb8968637fe5d33d81ac6d85217b
626
md
Markdown
extension/healthcheckextension/README.md
blakeroberts-wk/opentelemetry-collector
866143b1a71f1747f4d099f3571d08e2e2f6e2bb
[ "Apache-2.0" ]
694
2019-10-05T13:54:46.000Z
2022-03-31T04:55:08.000Z
extension/healthcheckextension/README.md
blakeroberts-wk/opentelemetry-collector
866143b1a71f1747f4d099f3571d08e2e2f6e2bb
[ "Apache-2.0" ]
7,217
2019-10-05T03:24:34.000Z
2022-03-31T22:17:45.000Z
extension/healthcheckextension/README.md
blakeroberts-wk/opentelemetry-collector
866143b1a71f1747f4d099f3571d08e2e2f6e2bb
[ "Apache-2.0" ]
851
2019-10-19T00:08:04.000Z
2022-03-31T16:39:05.000Z
# Health Check

Health Check extension enables an HTTP url that can be probed to check the status of the OpenTelemetry Collector. This extension can be used as a liveness and/or readiness probe on Kubernetes.

The following settings are required:

- `endpoint` (default = 0.0.0.0:13133): Address to publish the health check status to
- `port` (default = 13133): [deprecated] What port to expose HTTP health information.

Example:

```yaml
extensions:
  health_check:
```

The full list of settings exposed for this exporter is documented [here](./config.go) with detailed sample configurations [here](./testdata/config.yaml).
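To use the extension as a Kubernetes liveness probe, point the probe at the endpoint above. The snippet below is a hypothetical pod-spec fragment using standard Kubernetes fields; the path `/` and port `13133` reflect the defaults listed here, but verify them against your own configuration:

```yaml
# Hypothetical container spec fragment; only the probe itself is shown.
livenessProbe:
  httpGet:
    path: /
    port: 13133
  initialDelaySeconds: 5
  periodSeconds: 10
```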
29.809524
85
0.765176
eng_Latn
0.993572
dae9044ccd1e5ec73d387ffaa888641e30872c1d
8,610
md
Markdown
RELEASE-NOTES.md
trentlarson/uport-connect
cab4058544c8305ae8798a8c35d8d80e64e0afbc
[ "Apache-2.0" ]
329
2017-02-12T20:53:28.000Z
2022-02-23T09:31:15.000Z
RELEASE-NOTES.md
trentlarson/uport-connect
cab4058544c8305ae8798a8c35d8d80e64e0afbc
[ "Apache-2.0" ]
269
2017-02-01T17:14:45.000Z
2021-05-10T10:33:32.000Z
RELEASE-NOTES.md
trentlarson/uport-connect
cab4058544c8305ae8798a8c35d8d80e64e0afbc
[ "Apache-2.0" ]
112
2017-02-16T15:27:29.000Z
2021-10-31T14:29:50.000Z
# Release notes

## Version 1.1.0

### Personal Sign Flow

The uPort mobile app now supports "personal sign" functionality, and there is a new message to make such a request to a uPort mobile app. For `uport-connect`, support for this feature includes:

* New method `Connect.requestPersonalSign(data, id, sendOpts)` which creates and sends a personal sign request message to a mobile app. Its response may be listened for with `Connect.onResponse` as all other messages.
* Support for `personal_sign` RPC call (invoked by `web3.personal.sign`, e.g.) in `UportSubprovider`, via the above method on `Connect`

### Typed Data (ERC712) Signature Flow

The uPort mobile app also supports the new `eth_signTypedData` RPC call defined by the [EIP712 Specification](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-712.md). Correspondingly, this library now includes:

* New method `Connect.requestTypeDataSignature(typedData, id, sendOpts)`, which creates and sends a typed data signature request message to a mobile app. Its response may be listened for with `Connect.onResponse` as all other messages.
* Support for `eth_signTypedData` and `eth_signTypedData_v3` RPC calls in `UportSubprovider`, via the above method on `Connect`

### Simple App Profiles

It's now possible to include a list of JWTs to better identify an application making a request via a new property `vc`. In particular, a JWT in the form of an "app profile" has semantic meaning to a mobile app, and will be displayed along with the request card. This app profile can contain any subset of the following five fields which are recognized by the uPort Mobile App:

| Key | Type | Description |
|--------------|--------|-------------|
|`name` |`String`| Application name|
|`description` |`String`| Description of application |
|`url` |`String`| URL from which application is being served|
|`profileImage`|IPLD Link| Foreground image to display in requests to mobile app|
|`bannerImage` |IPLD Link| Background image to display in requests to mobile app |

In particular for `uport-connect`, this message will be set up as follows:

* A `Connect` instance can be instantiated with a `vc` option, which is a list of JWTs or IPFS hashes (in the form `/ipfs/${hash}`), which will be passed along with every request from that instance.
* If no `vc` argument is supplied, the first request from the instance will sign and upload to IPFS a JWT identifying the app's name, the URL from which it has been served, and any other supplied keys of the profile claim described above. This will become the only entry of the `vc` array, and be passed along with every request.

---------------------------------------------------------------------------------------------------------------

## Version 1.0

With the release of uPort Connect `v1.0.0`, there are a number of changes to our API -- the main differences to watch out for are described in this document, and the full API reference can be found [here](https://developer.uport.me/uport-connect/reference/index).

### `ConnectCore` -> `Connect`

First, on the module level, there is no longer a `ConnectCore` class. All core functionality is now implemented by the `Connect` object, instantiated as `new Connect(appName, opts)`. Supplemental "transports" which facilitate communication with the mobile app have moved to a new repository, [`uport-transports`](https://github.com/uport-project/uport-transports). The transports used in `Connect` are configurable, and you also have the option of providing custom transports in the constructor. See the `transport`, `pushTransport`, and `mobileTransport` options in the configuration object.

### No public keys in the browser

There was previously confusion about how to keep private keys safe when `Connect` required its own keypair in order to sign messages. To alleviate this, we no longer require that `Connect` is instantiated with a `privateKey` or `signer`; instead, when a `Connect` is instantiated for the first time on a particular site, it generates a new keypair in the browser to serve as the *instance*'s identity. This is the identity that will sign requests to a mobile app, and the mobile app user can confirm that subsequent requests come from the same identity.

It is still the case that signing a claim with a particular unique identity (which may belong to your application or company) requires that the keypair for that identity be stored somewhere secure (such as a server), and client

### `localStorage` Persistence

As mentioned above, the keypair that is created on construction of the `Connect` object is saved to localStorage, and is used to re-instantiate the object with the same keypair when the site is reloaded. Additionally, the `did` and `address` of the most recently authenticated user, and any verified claims they have shared with the application, are persisted in localStorage, so that they need not be requested again when a user revisits the page. Note that this does not share the information with any external service, and is intended to allow applications to maintain something akin to a session without storing a user's information on a server. For more information about controlling the persistence behavior of `Connect`, see the API [reference](https://developer.uport.me/uport-connect/reference/index).

### New functions `logout`, `reset`

To clear all saved data about a user from the browser, use the `logout()` method. To additionally destroy the keypair, and so the application's identity, use `reset()`. Note that following a reset, the user will be prompted to create a new identity in the mobile app upon the next interaction, and will not be able to associate the new browser identity with the old.

### `mnid`, `address`, `did`

With v1.0, we have changed our underlying architecture to use [Decentralized Identifiers](https://w3c-ccg.github.io/did-spec/) (DIDs) as our primary ID. We retain support for old identities via the `did:uport:` did method, while new accounts are created using the `ethr:did:` did method. The `did` of the currently authenticated user is readable from a connect instance as `connect.did`. The `address` field now returns the ethereum address of the currently authenticated user, and the `mnid` field is an encoding of the `address` along with the network id, described further [here](https://github.com/uport-project/mnid).

### `<request>.then()` -> `onResponse(requestId).then()`

In order to address issues that can arise with page reloading when switching between mobile browsers and the uPort app, this release decouples the concepts of *requests* and *responses*. Where previously a request would return a promise which would resolve when the response was available, now each request requires a `requestId`, which is then used to listen for the response. This is a much more powerful pattern, which allows for more customized handling of requests and responses, potentially on different pages of an app, and the request flow itself is stateless with respect to the browser.
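As a sketch of this decoupled pattern (hedged: only `requestDisclosure`, `onResponse`, and `connect.did` are taken from this document; the disclosure payload shape and response shape are assumptions, so consult the API reference):

```javascript
// Hypothetical request/response flow with the decoupled API.
const connect = new Connect('MyApp')
const reqId = 'disclosureReq' // any app-chosen id, used to match the response

// Fire the request (payload shape is an assumption for illustration)...
connect.requestDisclosure({ verified: ['name'] }, reqId)

// ...and listen for the response separately, possibly after a page reload.
connect.onResponse(reqId).then(res => {
  console.log(connect.did, res)
})
```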
### `requestAddress` -> `requestDisclosure`

The `requestAddress` function has been removed, and `address` and `did` are returned by default for all disclosure requests. Use `requestDisclosure()` instead.

### `attestCredentials` -> `sendVerification`

Renamed to make names more consistent across our libraries.

### `request` -> `send`

This is the function that sends a pre-signed JWT to the mobile app using the appropriate transport. It was renamed to clarify its role as the function that actually invokes the transports.

### `(new Connect(appName, {provider})).getProvider()` -> `connect.getProvider(provider)`

By default, `uport-connect` now uses `ethjs` as our base web3 provider. To pass a different base provider onto which uport functionality should be applied, pass the provider instance to the `getProvider` instance method, and the returned `UportSubprovider` will wrap the given provider. **Note:** some providers may not play nicely with the added uport functionality.

### `connect.getWeb3` removed

To reduce bundle size for those who do not need it, we no longer bundle `web3` with `uport-connect`. To get a `web3` object configured with uPort functionality, create a new `web3` instance with the `UportSubprovider` returned by `getProvider`, i.e.

```javascript
const web3 = new Web3(connect.getProvider())
```

---------------------------------------------------------------------------------------------------------------

## Version 0.7.0

* Support for encrypted push notifications
* New QR-code modals
* Updated documentation
111.818182
811
0.749361
eng_Latn
0.999021
dae963846cb1dfecd08357ef4b6eb68bfafb2353
576
md
Markdown
Nihal cse-2nd year/README.md
dsc-akgec/probation-projects
2f3d6a16cdcf06c88e8fa151d75935013fbc2859
[ "MIT" ]
3
2019-11-05T17:56:19.000Z
2019-11-07T05:04:05.000Z
Nihal cse-2nd year/README.md
dsc-akgec/probation-projects
2f3d6a16cdcf06c88e8fa151d75935013fbc2859
[ "MIT" ]
null
null
null
Nihal cse-2nd year/README.md
dsc-akgec/probation-projects
2f3d6a16cdcf06c88e8fa151d75935013fbc2859
[ "MIT" ]
3
2019-11-06T14:01:03.000Z
2019-11-06T19:30:37.000Z
# Nihal

The dataset is downloaded from Kaggle (goodreadbooks).

Recommendations are provided for the user:

- for similar books,
- for partial names.

The analysis is based entirely on the attributes provided in the dataset. Topic modeling is done using clustering algorithms (k-means).

In the screenshots:

- The starting images show the graphs of the dataset analysis.
- The second-to-last image shows the clusters with k=5.
- The last image shows a recommendation example for partial names searched by a user.

These graphs, and the recommendations produced during search, are the main outcome of this project.
32
85
0.777778
eng_Latn
0.998453
dae99b593851024b64f00cf460df0ab72ec3a39c
133
md
Markdown
content/releasenotes/sdk/index.md
Grasshooper/docs
d1dfbd9804e77df31359664157baa4515c4b19a9
[ "CC-BY-4.0" ]
null
null
null
content/releasenotes/sdk/index.md
Grasshooper/docs
d1dfbd9804e77df31359664157baa4515c4b19a9
[ "CC-BY-4.0" ]
null
null
null
content/releasenotes/sdk/index.md
Grasshooper/docs
d1dfbd9804e77df31359664157baa4515c4b19a9
[ "CC-BY-4.0" ]
null
null
null
---
title: "SDKs"
---

This category includes release notes for both the [Model SDK](model-sdk) and the [Platform SDK](platform-sdk).
22.166667
109
0.714286
eng_Latn
0.997383
dae9b1ad2535a9b91523149e4f40277a2af63335
651
md
Markdown
doc/vue-pdfjs-series.md
rossta/rossta.github.com
0e657493bdad7f67c8a7f19eed3d7ad5a0e5ef64
[ "MIT" ]
21
2016-07-23T21:54:34.000Z
2021-06-22T17:13:09.000Z
doc/vue-pdfjs-series.md
rossta/rossta.github.com
0e657493bdad7f67c8a7f19eed3d7ad5a0e5ef64
[ "MIT" ]
14
2016-08-25T10:44:29.000Z
2022-02-26T01:14:13.000Z
doc/vue-pdfjs-series.md
rossta/rossta.github.com
0e657493bdad7f67c8a7f19eed3d7ad5a0e5ef64
[ "MIT" ]
4
2016-07-26T15:14:30.000Z
2018-12-20T13:01:35.000Z
1. Intro to PDF.js
1. Sample PDF.js creation
1. Simple Vue PDF
1. Adding Functionality
1. Zoom controls and resizing
   - Manipulating viewport and css
   - Loading PDF at appropriate scale
1. Adding paginator
   - Jump to page on setting number
   - Set number on scroll page focus
   - Extract directive?
1. On-Demand rendering
   - Detecting visibility using boundaries
   - Extract directive?
1. Buffering - loading and mounting on scroll/jump
   - Updating page focus, scale on first load
   - Vue: watching mutating array of pages
1. Uploading to Filestack, updating pdf/pages
   - Resetting default scales, focused page
31
47
0.72043
eng_Latn
0.59669
daeaade025262c980d9bf9b07062f9090e928029
3,173
md
Markdown
README.md
gotuna/gotuna
12fdb1d49488da74fbec5b7e2fe401c485cca10a
[ "MIT" ]
39
2021-04-09T09:53:25.000Z
2022-01-24T09:43:33.000Z
README.md
gotuna/gotuna
12fdb1d49488da74fbec5b7e2fe401c485cca10a
[ "MIT" ]
6
2021-04-19T09:56:47.000Z
2021-09-03T07:25:16.000Z
README.md
gotuna/gotuna
12fdb1d49488da74fbec5b7e2fe401c485cca10a
[ "MIT" ]
7
2021-04-20T02:12:13.000Z
2022-01-24T09:43:37.000Z
<p align="center">
  <img src="https://avatars.githubusercontent.com/u/82163094?s=200&v=4">
</p>

<p align="center">
  <a href="https://pkg.go.dev/github.com/gotuna/gotuna"><img src="https://pkg.go.dev/badge/github.com/gotuna/gotuna" alt="PkgGoDev"></a>
  <a href="https://github.com/gotuna/gotuna/actions"><img src="https://github.com/gotuna/gotuna/workflows/tests/badge.svg" alt="tests status" /></a>
  <a href="https://goreportcard.com/report/github.com/gotuna/gotuna"><img src="https://goreportcard.com/badge/github.com/gotuna/gotuna" alt="Go Report Card" /></a>
  <a href="https://codecov.io/gh/gotuna/gotuna"><img src="https://codecov.io/gh/gotuna/gotuna/branch/main/graph/badge.svg?token=QG7CG4MSPC" alt="code coverage" /></a>
</p>

# GoTuna - Web framework for Go

GoTuna is a lightweight web framework for Go with a mux router, middlewares, user sessions, templates, embedded views, and a static file server.

Please visit [https://gotuna.org](https://gotuna.org) for the latest documentation, examples, and more.

# Features

- Router (gorilla)
- Standard `http.Handler` interface
- Middleware support
- User session management (gorilla)
- Session flash messages
- Native view rendering (html/template) with helpers
- Static file server included with the configurable prefix
- Standard logger interface
- Request logging and panic recovery
- Full support for embedded templates and static files
- User authentication (via user provider interface)
- Sample InMemory user provider included
- Multi-language support
- Database agnostic

# Requirements

- Make sure you have Go >= 1.16 installed

# Quick Start

Initialize a new app and install GoTuna:

```shell
mkdir testapp
cd testapp
go get -u github.com/gotuna/gotuna
```

Now create two files `main.go` and `app.html` as an example:

```go
// main.go
package main

import (
	"fmt"
	"net/http"
	"os"

	"github.com/gotuna/gotuna"
)

func main() {
	app := gotuna.App{
		ViewFiles: os.DirFS("."),
		Router:    gotuna.NewMuxRouter(),
	}

	app.Router.Handle("/", handlerHome(app))
	app.Router.Handle("/login", handlerLogin(app)).
		Methods(http.MethodGet, http.MethodPost)

	fmt.Println("Running on http://localhost:8888")
	http.ListenAndServe(":8888", app.Router)
}

func handlerHome(app gotuna.App) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		app.NewTemplatingEngine().
			Render(w, r, "app.html")
	})
}

func handlerLogin(app gotuna.App) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, "Login form...")
	})
}
```

This will be your app's html layout:

```html
<!-- app.html -->
{{- define "app" -}}
<!DOCTYPE html>
<html>
<head></head>
<body>
  <a href="/login">Please login!</a>
</body>
</html>
{{- end -}}
```

Run this simple app and visit http://localhost:8888 in your browser:

```shell
go run main.go
```

# Running example apps

GoTuna comes with an example app. Make sure you have git and Go >= 1.16 installed.

```shell
git clone https://github.com/gotuna/gotuna.git
cd gotuna
go run examples/fullapp/cmd/main.go
```

# Testing

```shell
go test -race -v ./...
```

# Licence

This project is licensed under the MIT License.
24.984252
165
0.714466
kor_Hang
0.352141
daeaf197e9b40bc8d7f42575d7ac0d6621020131
1,648
md
Markdown
docs/zh-CN/sql-reference/sql-statements/Administration/CREATE CLUSTER.md
kaiker19/incubator-doris
f4c5c6ccc650012a0db7ddda8a38f4c65cc5c9be
[ "Apache-2.0" ]
3,562
2018-08-30T05:26:10.000Z
2022-03-31T10:01:56.000Z
docs/zh-CN/sql-reference/sql-statements/Administration/CREATE CLUSTER.md
kaiker19/incubator-doris
f4c5c6ccc650012a0db7ddda8a38f4c65cc5c9be
[ "Apache-2.0" ]
5,199
2018-09-11T07:57:21.000Z
2022-03-31T16:17:50.000Z
docs/zh-CN/sql-reference/sql-statements/Administration/CREATE CLUSTER.md
kaiker19/incubator-doris
f4c5c6ccc650012a0db7ddda8a38f4c65cc5c9be
[ "Apache-2.0" ]
1,234
2018-08-31T09:34:54.000Z
2022-03-31T06:01:02.000Z
---
{
    "title": "CREATE CLUSTER",
    "language": "zh-CN"
}
---

<!-- 
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
-->

# CREATE CLUSTER

## description

This statement creates a new logical cluster and requires administrator privileges. If multi-tenancy is not used, simply create a cluster named default_cluster; otherwise, create a cluster with a custom name.

Syntax:

    CREATE CLUSTER [IF NOT EXISTS] cluster_name

    PROPERTIES ("key"="value", ...)

    IDENTIFIED BY 'password'

1. PROPERTIES

    Specifies the properties of the logical cluster.

    PROPERTIES ("instance_num" = "3")

    instance_num is the number of nodes in the logical cluster.

2. IDENTIFIED BY 'password'

    Each logical cluster has a superuser whose password must be specified when the logical cluster is created.

## example

1. Create a logical cluster test_cluster with 3 BE nodes, and specify its superuser password:

    CREATE CLUSTER test_cluster PROPERTIES("instance_num"="3") IDENTIFIED BY 'test';

2. Create a logical cluster default_cluster with 3 BE nodes (when multi-tenancy is not used), and specify its superuser password:

    CREATE CLUSTER default_cluster PROPERTIES("instance_num"="3") IDENTIFIED BY 'test';

## keyword

CREATE,CLUSTER
26.15873
97
0.725728
eng_Latn
0.719405
daeb0d1539da9d6755b0b44a11f87a8714a22f2c
5,274
md
Markdown
README.md
OpenDGPS/odgps-station
a9a4da038c890db63032b3c08a9888e45d852439
[ "Apache-2.0" ]
2
2020-12-08T13:13:00.000Z
2020-12-17T11:15:00.000Z
README.md
OpenDGPS/odgps-station
a9a4da038c890db63032b3c08a9888e45d852439
[ "Apache-2.0" ]
null
null
null
README.md
OpenDGPS/odgps-station
a9a4da038c890db63032b3c08a9888e45d852439
[ "Apache-2.0" ]
null
null
null
# odgps-station

Setup and configuration for an OpenDGPS reference station

This repository bundles all software and configuration files needed to set up an existing environment to act as a reference station for OpenDGPS. The overall goal is to help any participant run a number of configuration steps specific to the supported hardware, software, and network environment.

As described in [OpenDGPS/opendgps-doc](/opendgps/opendgps-doc), the role of a reference station is to provide differential data to users nearby, enabling them to remove GNSS signal distortions from actual data and calculate their own position with much higher accuracy. The accuracy achievable with differential data from reference stations depends on the distance to, and number of, stations around the receiving mobile device. Typically, differential data from reference stations more than 30km away has no effect. Weather and terrain also have an impact.

If a new (or moved) reference station is registered on the OpenDGPS network, it should be calibrated. The first calibration phase is done automatically in the first three hours without any interaction with the network. If the administrator of the reference station confirms the coordinates of this calculation, the reference station tries to register on the OpenDGPS network with the configured API key. If the registration is successful, the station tries to get differential data to refine its own position. Until this process is finished, the station is marked with calibration level 'SELF' to prevent it from sending data for calibration purposes to other devices. After 24h, and a heuristic analysis of whether the data meets stability thresholds, the station gets the calibration level 'BASIC' and is accepted to provide data to other user devices. The heuristic analysis is repeated periodically to ensure the antenna has not been moved physically (e.g. by extreme weather conditions).

A station can reach the 'PROOVEN' calibration level through a process in which the position of the antenna and the quality (mainly stability) of the installation are examined in person by another member of the OpenDGPS network. Differential data from PROOVEN reference stations has a higher weight in the calculation of the differential data provided to the user.

### Notes about privacy

The OpenDGPS network does not send the exact locations of any reference stations to end-user devices. If a request comes in from a device, either a mobile phone or another reference station, the server of the OpenDGPS network picks one or more pseudolocations (how many depends on the RTCM protocol) and calculates the differential data based on the real reference stations registered for this area. Due to the randomness, in theory the location of the pseudo reference station can be exactly the same as a real station, but the next request will always provide another position.

If a station is registered as 'PRIVATE', the location data of this station is encrypted in the database and, additionally, a fuzzy location is stored. To fuzz the original data, the latitude and longitude are rounded to five positions after the decimal point. In effect, the reference station can be anywhere in an area of around 6km east-west and 10km north-south. The station can still be used as a reference station: the network checks whether this area is relevant for a given request and, if so, decrypts the data on the fly, uses the differential data in the processor's registers, and clears the registers immediately.

## Usage

Basically, the setup is split into three parts to calculate the differential signal. First, the OS needs to be able to communicate with the GNSS receiver; second, the RTKLIB has to be up and running; and third, the station needs connectivity to the OpenDGPS network.

## Supported environments

A reference station (called a node) for OpenDGPS is built from a computational system and a GNSS receiver (commonly known as a GPS mouse).

### OpenDGPS nodes

An OpenDGPS node is a computer running Linux and powerful enough to do the calculation for RTK (Real-Time Kinematic). By this, the node calculates the __D__ in OpenDGPS, which is the __difference__ between the relative position provided by the satellites and the real (fixed) position measured beforehand. Connected to the node is a GNSS receiver capable of providing the raw data to the node via a serial connection (typically USB).

#### Hardware (Boards)

| Vendor | Part identifier | Description |
| --- | --- | --- |
| Raspberry | Raspberry Pi 4 | 4 ARMv8 cores with 1GByte of LDRAM |
| Texas Instruments | beagle bone | 4 ??? cores with 1GByte of RAM |
| parallela | Parallela Board | 18 cores (Epiphany, FPGA, ARMv9) with 1GByte of RAM |

#### Hardware (GNSS Receiver)

| Vendor | Part identifier | Description |
| --- | --- | --- |
| Myriad | LimeSDR (USB) | Xilinx Artix 7, 100kHz to 6 GHz Rx/Tx full duplex, [github: gnss-sdr/GNSS-SDR](https://github.com/gnss-sdr/gnss-sdr) |
| u-blox | LEA M8T | Arduino board to provide the raw data via USB |

## Helpful links

- [DGPS mit RTKLIB](http://www.archeotech.de/DGPS-mit-RTKLIB/) Describes (in German) how to set up a u-blox receiver with reference data from the German public SAPOS service.
- [RTKLIB auf dem Raspberry Pi](http://www.archeotech.de/rtklib-auf-raspberrypi/)
99.509434
681
0.795032
eng_Latn
0.999209
daeb5451f15c2ae14878ed838de8ae0b6b8fbfef
1,084
md
Markdown
README.md
RandomDeveloperM/centrifuge
f2d57b696dbc2204054c74c8695bf697f27eef80
[ "MIT" ]
1
2018-05-03T06:14:03.000Z
2018-05-03T06:14:03.000Z
README.md
RandomDeveloperM/centrifuge
f2d57b696dbc2204054c74c8695bf697f27eef80
[ "MIT" ]
null
null
null
README.md
RandomDeveloperM/centrifuge
f2d57b696dbc2204054c74c8695bf697f27eef80
[ "MIT" ]
null
null
null
## Centrifuge

**Work in progress**. Not ready for production yet. Contributions are welcome.

This library is the real-time core of the Centrifugo server. It also aims to be a standalone, general-purpose real-time messaging library for the Go programming language.

Message transports:

* Websocket transport using JSON or binary Protobuf protocol
* GRPC bidirectional streaming over HTTP/2 (Protobuf only)
* SockJS polyfill library support (JSON only)

Features:

* Fast and optimized for low-latency communication with thousands of client connections
* Scaling to many nodes with Redis PUB/SUB, built-in Redis sharding, Sentinel for HA
* Presence information for channels (show all active clients in a channel)
* History information for channels (last messages sent into a channel)
* Join/leave events for channels (client goes online/offline)
* Message recovery mechanism to survive short network disconnects
* Bidirectional asynchronous message communication
* RPC support to call custom handlers in your Go code
* MIT license

[Godoc](https://godoc.org/github.com/centrifugal/centrifuge)
41.692308
164
0.804428
eng_Latn
0.979496
daeb7336206d4e6c2a800fe576abf19fe9a30252
125
md
Markdown
README.md
mbeidler/kata-bank-ocr
2b7b61d7a862cae51979ecbd872b965a45b01445
[ "BSD-3-Clause" ]
null
null
null
README.md
mbeidler/kata-bank-ocr
2b7b61d7a862cae51979ecbd872b965a45b01445
[ "BSD-3-Clause" ]
null
null
null
README.md
mbeidler/kata-bank-ocr
2b7b61d7a862cae51979ecbd872b965a45b01445
[ "BSD-3-Clause" ]
null
null
null
### About ###

---

An implementation of the [Bank OCR Kata](http://codingdojo.org/cgi-bin/index.pl?KataBankOCR) in Haskell.
20.833333
104
0.696
kor_Hang
0.31956
daeb78f6e4f61ca1dac200a4a8b2bc2a6f7abde6
5,012
md
Markdown
Azure-RMSDocs/rms-client/mobile-app-faq.md
younggunnerxx/Azure-RMSDocs
7f4d5ad2ed28974d00c61fa0a587e732b229a61a
[ "CC-BY-4.0", "MIT" ]
1
2019-08-29T00:52:00.000Z
2019-08-29T00:52:00.000Z
Azure-RMSDocs/rms-client/mobile-app-faq.md
younggunnerxx/Azure-RMSDocs
7f4d5ad2ed28974d00c61fa0a587e732b229a61a
[ "CC-BY-4.0", "MIT" ]
null
null
null
Azure-RMSDocs/rms-client/mobile-app-faq.md
younggunnerxx/Azure-RMSDocs
7f4d5ad2ed28974d00c61fa0a587e732b229a61a
[ "CC-BY-4.0", "MIT" ]
null
null
null
---
# required metadata

title: FAQs for Azure Information Protection app for iOS & Android
description: Some frequently asked questions to help you use the Azure Information Protection app for iOS and Android
author: cabailey
ms.author: cabailey
manager: barbkess
ms.date: 07/15/2019
ms.topic: conceptual
ms.collection: M365-security-compliance
ms.service: information-protection
ms.custom: askipteam
ms.assetid: 539b4ff8-5d3b-4c4d-9c84-c14da83ff76d

# optional metadata

#audience:
#ms.devlang:
ms.reviewer: esaggese
ms.suite: ems
#ms.tgt_pltfrm:
ms.custom: user

---

# FAQs for Microsoft Azure Information Protection app for iOS and Android

*Applies to: Active Directory Rights Management Services, [Azure Information Protection](https://azure.microsoft.com/pricing/details/information-protection)*

This page provides answers to frequently asked questions about the Azure Information Protection app for iOS and Android.

## What can I do with the Azure Information Protection app?

This app lets you view rights-protected email messages (.rpmsg files) if your email app doesn't natively support rights management data protection. This app also lets you view rights-protected PDF documents, and pictures and text files that are rights-protected.

Because this app is a viewer, you can't use it to create new protected email messages, reply to them, or create or edit protected files. In addition, the app can't open attachments for the files that you view. For example, attachments in protected PDF documents or in rights-protected email messages.

## Can I open PDF files that are in SharePoint protected libraries and OneDrive for Business?

Yes, you can open protected PDF files that others have shared with you via SharePoint and OneDrive for Business. Tap the link, and choose this app to open the file for you.

This app can also open PDF files that have been protected outside SharePoint and OneDrive for Business (protected PDF and .ppdf files).

## Can my mobile device run the Azure Information Protection app?

The Azure Information Protection app requires a minimum version of **iOS 8** or **Android 4.4**. If you have these versions or higher, you can install the app to run on your mobile device:

- If your mobile device is managed by Microsoft Intune, you might be able to install the Azure Information Protection app from your company portal.
- If your mobile device is not managed by Microsoft Intune or the Azure Information Protection app isn't available from your company portal, you can install the app either directly from the iTunes store and the Google Play store, or by clicking the iOS or Android icon from the **Mobile Devices** section on the [Azure Information Protection download page](https://portal.azurerms.com/#/download).

## How do I get started with the viewer app?

After you've installed the app, you don't need to do anything more at that point. Wait until you get a protected email or file that you want to view, and then choose the **AIP Viewer** to open it. You will then be asked to sign in with your work or school account, or prompted to select a certificate. After these credentials are authenticated, you can then read the contents.

However, if you don't want to wait, you can use the following instructions to send yourself a protected email or file to view: [Get started with the Microsoft Azure Information Protection app for iOS and Android](mobile-app-get-started.md)

## What credentials should I use to sign in to this app?

If your organization already has AD RMS on-premises (with the mobile device extension) or uses Azure Information Protection, use your work credentials to sign in. If your personal email address was used to protect the file, use credentials from a free [Microsoft account](https://signup.live.com) to sign in.

## Can I sign up for the free account with my personal email address, such as a Hotmail or Gmail account?

Yes, when you apply for a Microsoft account, you can specify your Hotmail or Gmail email address, or any other email address that you own. However, although this viewer can open protected files with this account, not all applications can open protected content when a Microsoft account is used for authentication. [More information](../secure-collaboration-documents.md#supported-scenarios-for-opening-protected-documents)

## Which file extensions can I open with this app?

You can open .rpmsg, .pdf, .ppdf, .pjpg, .pjpeg, .ptiff, .ppng, .ptxt, .pxml, and several other text and image file formats. For the full list of text and image file name extensions, see the first table in the [Supported file types for classification and protection](clientv2-admin-guide-file-types.md#supported-file-types-for-classification-and-protection) section from the admin guide.

## How do I provide feedback about this app?

In the app, go to **Settings** > **Send feedback**.

## My question has not been answered—what should I do?

Post your question on our [Yammer site](https://www.yammer.com/AskIPTeam).
57.609195
398
0.786313
eng_Latn
0.99521
daebc28f3e48e977fc483b351725e2544317c67e
995
md
Markdown
README.md
ginaldoterencio/fullscreen-scrolling
39d3961c0489ed92ff9e118ce6af64a35ca5f255
[ "MIT" ]
null
null
null
README.md
ginaldoterencio/fullscreen-scrolling
39d3961c0489ed92ff9e118ce6af64a35ca5f255
[ "MIT" ]
null
null
null
README.md
ginaldoterencio/fullscreen-scrolling
39d3961c0489ed92ff9e118ce6af64a35ca5f255
[ "MIT" ]
null
null
null
# fullscreen-scrolling

[![Build Status](https://secure.travis-ci.org/user/fullscreen-scrolling.png?branch=master)](http://travis-ci.org/user/fullscreen-scrolling)

## Installation

Install with [Bower](http://bower.io):

```
bower install --save fullscreen-scrolling
```

The component can be used as a Common JS module, an AMD module, or a global.

## API

### fullscreen-scrolling()

## Testing

Install [Node](http://nodejs.org) (comes with npm) and Bower. From the repo root, install the project's development dependencies:

```
npm install
bower install
```

Testing relies on the Karma test-runner. If you'd like to use Karma to automatically watch and re-run the test file during development, it's easiest to globally install Karma and run it from the CLI.

```
npm install -g karma
karma start
```

To run the tests in Firefox, just once, as CI would:

```
npm test
```

## Browser support

* Google Chrome (latest)
* Opera (latest)
* Firefox 4+
* Safari 5+
* Internet Explorer 8+
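Since the API section above only names the exported function, here is a hedged usage sketch; the require name is assumed from the package name, and any options it might accept are undocumented here:

```javascript
// Hypothetical CommonJS usage of the component.
var fullscreenScrolling = require('fullscreen-scrolling');

// Initialize fullscreen scrolling on the page.
fullscreenScrolling();
```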
17.767857
139
0.720603
eng_Latn
0.894168
daec0864f01f50d39cdb9feb0b3084de0aa15d64
731
md
Markdown
eosgi-project-archetype/src/main/resources/archetype-resources/_distro/README.md
njbartlett/EOSGi-Maven
377606f24a1506b7e38f2366886f73d4d84df151
[ "Apache-2.0" ]
1
2019-07-03T21:06:40.000Z
2019-07-03T21:06:40.000Z
eosgi-project-archetype/src/main/resources/archetype-resources/_distro/README.md
njbartlett/EOSGi-Maven
377606f24a1506b7e38f2366886f73d4d84df151
[ "Apache-2.0" ]
null
null
null
eosgi-project-archetype/src/main/resources/archetype-resources/_distro/README.md
njbartlett/EOSGi-Maven
377606f24a1506b7e38f2366886f73d4d84df151
[ "Apache-2.0" ]
null
null
null
Distro Module
=============

This module defines a distribution (or *distro*), which is a selection of runtime dependencies from which an application may be assembled. The contents of the distro should include bundles that are not necessarily required at compile time by any application bundle, but may be required at runtime. Examples include implementation bundles for OSGi specifications such as SCR (the runtime for Declarative Services), Event Admin, Configuration Admin, the Felix Gogo shell, etc.

Note that including a bundle in the distro does not mean it will be included in all application assemblies. It just makes that bundle available for inclusion, and it defines the version(s) of those bundles that are available.
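For instance, making the Felix SCR runtime available to assemblies might look like the Maven dependency below in this module's POM. This is a hypothetical sketch: the coordinates illustrate the idea and are not taken from this project's actual configuration.

```xml
<!-- Hypothetical: exposes the SCR runtime bundle to application assemblies. -->
<dependency>
  <groupId>org.apache.felix</groupId>
  <artifactId>org.apache.felix.scr</artifactId>
  <version>2.1.16</version>
</dependency>
```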
91.375
335
0.798906
eng_Latn
0.99988
daed3e088587541262d63c0c3d0e0f3c124ea1c1
417
md
Markdown
services/frontend/README.md
jonny-rimek/wowmate
608f34f034625c15e46ae4a514a29184bce089f7
[ "MIT" ]
7
2021-03-30T20:45:35.000Z
2022-01-18T09:19:07.000Z
services/frontend/README.md
jonny-rimek/wowmate
608f34f034625c15e46ae4a514a29184bce089f7
[ "MIT" ]
169
2021-03-29T22:01:51.000Z
2021-06-09T12:35:57.000Z
services/frontend/README.md
jonny-rimek/wowmate
608f34f034625c15e46ae4a514a29184bce089f7
[ "MIT" ]
null
null
null
# frontend

> frontend for wowmate.io

## Build Setup

```bash
# install dependencies
$ yarn install

# serve with hot reload at localhost:3000
$ yarn dev

# build for production and launch server
$ yarn build
```

If building for the dev environment, set the environment variable `NUXT_ENV=dev`; if it is not set, it'll build for prod (see the sketch below).

For detailed explanation on how things work, check out [Nuxt.js docs](https://nuxtjs.org).
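For example, a dev-environment build could be invoked as a one-liner combining the variable and the command above:

```bash
# build with the dev environment selected
NUXT_ENV=dev yarn build
```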
19.857143
90
0.745803
eng_Latn
0.986658
daee29f670f556b1344acfc069fb6ad39c97bc9e
161
markdown
Markdown
README.markdown
noemie-git/Drum-Machine
8f029ae56e85c6ccde9a127faeb30e0eb7819849
[ "MIT-0", "MIT" ]
null
null
null
README.markdown
noemie-git/Drum-Machine
8f029ae56e85c6ccde9a127faeb30e0eb7819849
[ "MIT-0", "MIT" ]
null
null
null
README.markdown
noemie-git/Drum-Machine
8f029ae56e85c6ccde9a127faeb30e0eb7819849
[ "MIT-0", "MIT" ]
null
null
null
# Drum Machine

A Pen created on CodePen.io. Original URL: [https://codepen.io/noemie_brancourt/pen/oNpBbjG](https://codepen.io/noemie_brancourt/pen/oNpBbjG).
26.833333
142
0.770186
yue_Hant
0.37833