cisco fpr1140 ftd smart license issue

If you receive a notification like the following:

Product Instance Failed to Connect – The device “UDI_PID:FPR-1140; UDI_SN:xxxxxxxxxxxx; ” in the virtual account “DEFAULT” has not connected within its renewal period, and may run in a degraded state if it does not connect within the next 1 day. If the device is not going to connect, you can remove it to immediately release the licenses it is consuming.

try logging in to the system (expert mode, as root) and running:

 

root@fw:/home/admin# pmtool restartbyid tomcat
root@fw:/home/admin# pmtool status | grep "tomcat"
Then wait a while (it can take quite a long time) for the web management portal to become accessible again.
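
If the device still shows as disconnected after the portal recovers, checking the smart agent state from the FTD CLI can help narrow things down. This is only a sketch and assumes your FTD release exposes the standard show license commands:

> show license status
> show license all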

 

squid

acl localnet src 192.168.0.0/16 # RFC 1918 local private network (LAN)
acl localnet src fc00::/7 # RFC 4193 local private network range
acl localnet src fe80::/10 # RFC 4291 link-local (directly plugged) machines

acl SSL_ports port 443
acl Safe_ports port 80 # http
acl Safe_ports port 21 # ftp
acl Safe_ports port 443 # https
acl Safe_ports port 70 # gopher
acl Safe_ports port 210 # wais
acl Safe_ports port 1025-65535 # unregistered ports
acl Safe_ports port 280 # http-mgmt
acl Safe_ports port 488 # gss-http
acl Safe_ports port 591 # filemaker
acl Safe_ports port 777 # multiling http

http_access deny !Safe_ports

http_access deny CONNECT !SSL_ports

http_access allow localhost manager
http_access deny manager

http_access allow localhost
acl hasRequest has request
logformat custom_log %{%Y-%m-%d %H:%M:%S}tl %>a:%>p %Ss/%03>Hs:%Sh "%rm %ru HTTP/%rv" %mt %>Hs %<st %tr "%{User-Agent}>h" "%{Referer}>h"
access_log /var/log/squid/access.log custom_log hasRequest

http_access deny to_localhost

http_access deny to_linklocal

http_access allow localnet
http_access deny all
http_port 3128
maximum_object_size 1 GB
cache_dir ufs /var/spool/squid 10240 16 256
cache_mem 256 MB
maximum_object_size_in_memory 4 MB
cache_replacement_policy heap LFUDA
range_offset_limit -1
quick_abort_min -1 KB

coredump_dir /var/spool/squid

refresh_pattern -i \.7z$ 300 90% 14320 reload-into-ims
refresh_pattern -i \.x03$ 300 90% 14320 reload-into-ims
refresh_pattern -i \.m30$ 300 90% 14320 reload-into-ims
refresh_pattern -i \.m35$ 300 90% 14320 reload-into-ims
refresh_pattern -i \.zip$ 300 90% 14320 reload-into-ims
refresh_pattern -i \.irn$ 300 90% 14320 reload-into-ims
refresh_pattern ^ftp: 1440 20% 10080
refresh_pattern ^gopher: 1440 0% 1440
refresh_pattern -i symantecliveupdate.com/.* 1440 90% 43200
refresh_pattern -i symantec.com/.* 1440 90% 43200
refresh_pattern -i (/cgi-bin/|\?) 0 0% 0
refresh_pattern . 0 20% 4320
httpd_suppress_version_string on
via off
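
After editing squid.conf, the configuration can be checked and reloaded in place; a quick sketch, where the proxy address and test URL are placeholders:

# parse the config for syntax errors, then reload the running instance
squid -k parse
squid -k reconfigure

# test from a LAN client through the proxy (replace 192.168.1.1 with your proxy address)
curl -x http://192.168.1.1:3128 -I https://www.example.com/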

elastic 8.13.2

To set up syslog collection from network devices, configure Filebeat as below:
filebeat.yml
filebeat.inputs:
- type: syslog
  protocol.tcp:
    host: "0.0.0.0:5014"
  fields:
    type: syslog
  fields_under_root: true

setup.template.settings:
  index.number_of_shards: 1

setup.kibana:

output.logstash:
  hosts: ["x.x.x.x:10083"]
  enabled: true

processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~
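
Before restarting the service, Filebeat can verify both the syntax and the Logstash output; a minimal sketch assuming a package install with the config at /etc/filebeat/filebeat.yml:

# check the configuration, then verify the connection to the logstash output
filebeat test config -c /etc/filebeat/filebeat.yml
filebeat test output -c /etc/filebeat/filebeat.yml
systemctl restart filebeat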

For the Logstash pipeline, a configuration along these lines works:

input {
  beats {
    port => 5044
  }
}

filter {
  date {
    match => [ "@timestamp", "yyyy-MM-dd HH:mm:ss Z" ]
  }
  mutate {
    remove_field => ["@version", "_index", "_source", "ecs"]
  }

  if [type] == "syslog" {
    grok {
match => { "message" => "<%{NONNEGINT:syslog_pri}>%{NONNEGINT:version}%{SPACE}(?:-|%{TIMESTAMP_ISO8601:syslog_timestamp})%{SPACE}(?:-|%{IPORHOST:hostname})%{SPACE}(?:%{SYSLOG5424PRINTASCII:program}|-)%{SPACE}(?:-|%{SYSLOG5424PRINTASCII:process_id})%{SPACE}(?:-|%{SYSLOG5424PRINTASCII:message_id})%{SPACE}(?:-|(?(\[.*?[^\\]\])+))(?:%{SPACE}%{GREEDYDATA:syslog_message}|)"}
match => { "message" => "(<%{NUMBER:syslog_event_id}>)?%{SYSLOGTIMESTAMP:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} )?%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?:%{GREEDYDATA:syslog_message}" }
match => { "message" => "(<%{NUMBER:syslog_event_id}>)?%{TIMESTAMP_ISO8601:syslog_timestamp} (%{SYSLOGHOST:syslog_hostname} )?%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?:%{GREEDYDATA:syslog_message}" }
match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
add_tag => [ "syslog" ]
}
mutate {
add_field => { "[@metadata][target_index]" => "syslog-%{+YYYY.MM.dd}" }
}
date {
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
}
}

if [event][module] == "nginx" {
grok {
match => [ "message" , "%{COMMONAPACHELOG}+%{GREEDYDATA:extra_fields}"]
overwrite => [ "message" ]
}
mutate {
convert => ["response", "integer"]
convert => ["bytes", "integer"]
convert => ["responsetime", "float"]
add_field => { "[@metadata][target_index]" => "nginx-%{+YYYY.MM.dd}" }
}
geoip {
source => "address"
target => "clientgeo"
add_tag => ["nginx-geoip"]
}
}

if [event][module] == "auditd" {
grok {
match => { "message" => "type=%{WORD:audit_type} msg=audit\(%{NUMBER:audit_epoch:timestamp}:%{NUMBER:audit_counter}\): pid=%{NUMBER:audit_pid} uid=%{NUMBER:audit_uid} auid=%{NUMBER:audit_audid} ses=%{NUMBER:ses} msg=\'op=%{WORD:operation}:%{WORD:detail_operation} grantors=%{WORD:pam_login},%{WORD:pam_key},%{WORD:pam_limit},%{WORD:pam_system} acct=\"%{WORD:acct_user}\" exe=\"%{GREEDYDATA:exec}\" hostname=%{GREEDYDATA:hostname} addr=%{GREEDYDATA:ipaddr} terminal=%{WORD:terminal} res=%{WORD:result}" }
}
date {
match => [ "audit_epoch", "UNIX_MS" ]
}
mutate {
split => ["host", "."]
add_field => { "hostname" => "%{[host][0]}" }
add_field => { "podName" => "%{[host][1]}" }
add_field => { "ignore" => "%{[host][2]}" }
remove_field => ["ignore", "host"]
add_field => { "[@metadata][target_index]" => "audit-%{+YYYY.MM.dd}" }
}
}
if [container][id] =~ /service/ {
mutate {
add_field => { "[@metadata][target_index]" => "%{[container][id]}-%{+YYYY.MM.dd}" }
}
}
}

output {
  elasticsearch {
    hosts => ["https://x.x.x.x:9200"]
    user => "elastic"
    password => "xxxxxxxx"
    #data_stream => "true"
    #data_stream_dataset => "xxxxxx"
    index => "%{[@metadata][target_index]}"
    ssl_enabled => "true"
    #ssl => true
    #ssl_certificate_verification => true
    #cacert => "/usr/share/logstash/config/ca.pem"
    ssl_verification_mode => "none"
    ssl_certificate_authorities => "/usr/share/logstash/config/ca.pem"
    ca_trusted_fingerprint => "xxxxxxxxxxxxxxxxxxxxxxxxxx"
  }
}
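
The pipeline can be syntax-checked before restarting Logstash; a sketch assuming a package install with the pipeline saved as /etc/logstash/conf.d/syslog.conf:

# validate the pipeline definition, then restart logstash to load it
/usr/share/logstash/bin/logstash --config.test_and_exit -f /etc/logstash/conf.d/syslog.conf
systemctl restart logstash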

ORA-01210: data file header is media corrupt

The fix below uses BBED to copy a valid checkpoint SCN from a healthy data file header (file 1) into the corrupted header (file 5):

bbed parfile=parfile.txt

parfile.txt
blocksize=8192
listfile=file.txt
mode=edit
password=blockedit

file.txt
1 /opt/app/oradata/test/system01.dbf 8178892
2 /opt/app/oradata/test/sysaux01.dbf 2936012
3 /opt/app/oradata/test/undotbs01.dbf 9017753
4 /opt/app/oradata/test/users01.dbf 5242880
5 /opt/app/oradata/test/test01.dbf 2547200

SQL> select checkpoint_change# from v$datafile_header;

CHECKPOINT_CHANGE#
------------------
120020207
120020207
120020207
120020207
120000000

BBED> info
BBED> p kcvfhckp
BBED> d /v dba 1,1 offset 484 count 16
BBED> assign dba 5,1 kcvfh.kcvfhckp.kcvcpscn.kscnbas = dba 1,1 kcvfh.kcvfhckp.kcvcpscn.kscnbas
BBED> d /v dba 5,1 offset 484 count 16
BBED> set dba 5,1
BBED> sum apply

SQL> recover datafile 5;
Connected to an idle instance.

SQL> startup
ORACLE instance started.

Total System Global Area 3340451840 bytes
Fixed Size 2217952 bytes
Variable Size 2499807264 bytes
Database Buffers 822083584 bytes
Redo Buffers 16343040 bytes
Database mounted.
Database opened.
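
After the database opens, it is worth confirming that file 5 now reports the same checkpoint SCN as the other files; a minimal check from the shell:

# re-run the earlier header query; all files should now show a consistent SCN
sqlplus -s / as sysdba <<'EOF'
select file#, checkpoint_change# from v$datafile_header;
exit
EOF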

mixed content: https page requesting http elements

Mixed Content: The page at ‘‘ was loaded over HTTPS, but requested an insecure element ‘‘. This request was automatically upgraded to HTTPS, For more information see

Add this configuration to nginx.conf:
add_header Content-Security-Policy "upgrade-insecure-requests; connect-src *";
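
After adding the header, test and reload nginx and confirm the header is actually returned; the URL below is a placeholder:

# test the configuration, reload nginx, then check for the CSP header in the response
nginx -t && nginx -s reload
curl -sI https://www.example.com/ | grep -i content-security-policy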

save excel file to csv with double quotes

Add the following macro in the Excel VBA editor (Alt+F11, Insert > Module) and run it:
Sub CSVFile()
'updateby Extendoffice
    Dim xRg As Range
    Dim xRow As Range
    Dim xCell As Range
    Dim xStr As String
    Dim xSep As String
    Dim xTxt As String
    Dim xName As Variant
    On Error Resume Next
    If ActiveWindow.RangeSelection.Count > 1 Then
        xTxt = ActiveWindow.RangeSelection.AddressLocal
    Else
        xTxt = ActiveSheet.UsedRange.AddressLocal
    End If
    Set xRg = Application.InputBox("Please select the data range:", "Kutools for Excel", xTxt, , , , , 8)
    If xRg Is Nothing Then Exit Sub
    xName = Application.GetSaveAsFilename("", "CSV File (*.csv), *.csv")
    If xName = False Then Exit Sub   'stop if the user cancelled the Save As dialog
    xSep = Application.International(xlListSeparator)
    Open xName For Output As #1
    For Each xRow In xRg.Rows
        xStr = ""
        For Each xCell In xRow.Cells
            'wrap every cell value in double quotes and append the list separator
            xStr = xStr & """" & xCell.Value & """" & xSep
        Next
        'drop the trailing separator before writing the line
        While Right(xStr, 1) = xSep
            xStr = Left(xStr, Len(xStr) - 1)
        Wend
        Print #1, xStr
    Next
    Close #1
    If Err = 0 Then MsgBox "The file has saved to: " & xName, vbInformation, "Kutools for Excel"
End Sub
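
Note that the macro uses the system list separator (Application.International(xlListSeparator)), so on locales where the separator is ";" the exported file is semicolon-delimited rather than comma-delimited.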